60 ví dụ mã Python được tìm thấy liên quan đến "lưu đầu ra". Bạn có thể bỏ phiếu thuận cho những ví dụ bạn thích hoặc bỏ phiếu chống những ví dụ bạn không thích, và chuyển đến dự án gốc hoặc tệp nguồn bằng cách nhấp vào các liên kết phía trên mỗi ví dụ.
def get_output_save_steps(self):
    """Returns save steps for each clock as boolean values.

    Returns
    -------
    save_steps : :class:`xarray.Dataset`
        A new Dataset with boolean data variables for each clock
        dimension other than the master clock, where values specify
        whether or not to save outputs at every step of a simulation.
    """
    # Seed the dataset with the master clock as the only coordinate.
    ds = Dataset(coords={self.master_clock_dim: self.master_clock_coord})

    for clock, coord in self.clock_coords.items():
        if clock != self.master_clock_dim:
            # True wherever a master-clock step coincides with this clock's steps.
            save_steps = np.in1d(self.master_clock_coord.values, coord.values)
            ds[clock] = (self.master_clock_dim, save_steps)

    return ds
def save_output(self):
    """Write scan metadata, sorted results, and logged errors to
    ``self.output_path``; report any failure via the terminal writer."""
    try:
        # ``with`` guarantees the handle is closed even on a partial write.
        with open(self.output_path, 'w') as output:
            output.write("# url: {host}\n".format(host=self.host.geturl()))
            output.write("# start: {timestamp}\n".format(
                timestamp=self.start_time.strftime("%m-%d-%y_%H:%M:%S.%f")))
            output.write("# wordlist: {wordlist}\n".format(wordlist=self.wordlist))
            output.write("# extensions: {exts}\n".format(exts=self.extensions))
            # One comma-separated line per result, ordered by HTTP status code.
            for x in sorted(self.results, key=lambda x: x.code):
                output.write("{code},{url},{length}\n".format(
                    url=x.url, code=x.code, length=x.length))
            for e in self.error_log:
                output.write("# {msg}\n".format(msg=e))
            output.write("# stop: {timestamp}\n".format(
                timestamp=datetime.now().strftime("%m-%d-%y_%H:%M:%S.%f")))
    except Exception as ex:
        self.terminalw.print_error(
            "Failed creating output file (unknown): {msg}".format(msg=ex))
def save_json_output(json):
    """Write the *json* string to a timestamped ``.json`` file in ``json_folder``.

    Raises
    ------
    IOError
        If the file cannot be opened, written, or its permissions changed.
    """
    # NOTE(review): the parameter name ``json`` shadows the stdlib module;
    # kept unchanged for interface compatibility with existing callers.
    json_file = "%s.json" % datetime.now().strftime('[%d-%m-%Y %H:%M:%S]')
    json_file_path = "%s/%s" % (json_folder, json_file)
    try:
        f = open(json_file_path, "w+")
    except OSError:
        raise IOError("Could not open %s" % json_file_path)
    try:
        with f:
            f.write(json)
    except OSError:
        raise IOError("Could not write in %s" % json_file_path)
    try:
        chmod(json_file_path, 0o666)
    except OSError:
        raise IOError("Could not open %s" % json_file_path)
def scrub_output_pre_save(model, **kwargs):
    """Scrub output before saving notebooks (Jupyter pre-save hook)."""
    # only run on notebooks
    if model['type'] != 'notebook':
        return
    # only run on nbformat v4
    if model['content']['nbformat'] != 4:
        return
    for cell in model['content']['cells']:
        if cell['cell_type'] != 'code':
            continue
        # Clear outputs and execution counters in place.
        cell['outputs'] = []
        cell['execution_count'] = None

# usage: c.FileContentsManager.pre_save_hook = scrub_output_pre_save
def save_h5_output(h5_filename, seg, segrefine, group, grouppred, label_dtype='uint8'):
    """Save segmentation and group-prediction label arrays to an HDF5 file.

    Datasets written: 'seglabel', 'segrefine', 'pid', 'predpid', each
    gzip-compressed (level 1) with dtype *label_dtype*.
    """
    print(h5_filename)
    h5_fout = h5py.File(h5_filename)
    # All four datasets share identical storage options.
    for name, data in (('seglabel', seg), ('segrefine', segrefine),
                       ('pid', group), ('predpid', grouppred)):
        h5_fout.create_dataset(name, data=data, compression='gzip',
                               compression_opts=1, dtype=label_dtype)
    h5_fout.close()
def save_task_output(task, output):
    '''
    This function is a light wrapper to save a luigi task's output.
    Instead of writing the output directly onto the output file, we write
    onto a temporary file and then atomically move the temporary file onto
    the output file. This defends against situations where we may have
    accidentally queued multiple instances of a task; if this happens and
    both tasks try to write to the same file, then the file gets corrupted.
    But if both of these tasks simply write to separate files and then each
    perform an atomic move, then the final output file remains uncorrupted.
    Doing this for more or less every single task in GASpy gots annoying,
    so we wrapped it.

    Args:
        task    Instance of a luigi task whose output you want to write to
        output  Whatever object that you want to save
    '''
    # temporary_path() renames the temp file onto the real target on exit.
    with task.output().temporary_path() as task.temp_output_path:
        with open(task.temp_output_path, 'wb') as file_handle:
            pickle.dump(output, file_handle)
def save_analysis_output(self, imageId, module_name, module_value, data,
                         module_type=None, directory_data=False):
    """Persist analyzer output for an image under the image root directory.

    When *directory_data* is False, *data* is a dict written as a key/value
    file named *module_value*; otherwise *data* is a directory moved into
    place (replacing any existing output directory).
    """
    # Choose the output directory; 'base' (or no) module_type uses the
    # plain "analyzer_output" folder, others get a suffixed folder.
    if not module_type or module_type == 'base':
        odir = '/'.join([self.imagerootdir, imageId, "analyzer_output", module_name])
    else:
        odir = '/'.join([self.imagerootdir, imageId,
                         "analyzer_output_" + module_type, module_name])

    if not directory_data:
        thefile = '/'.join([odir, module_value])
        if not os.path.exists(odir):
            os.makedirs(odir)
        return (anchore_utils.write_kvfile_fromdict(thefile, data))
    else:
        if os.path.isdir(data):
            # Replace any previous output directory wholesale.
            if os.path.isdir(odir):
                shutil.rmtree(odir)
            os.makedirs(odir)
            shutil.move(data, odir)
def save_output_file(output_file, output_file_name):
    """
    Save output file.

    :param output_file_name:
        Output file name.
    :type output_file_name: str

    :param output_file:
        Output file.
    :type output_file: io.BytesIO
    """
    # Rewind the in-memory buffer before copying it out.
    output_file.seek(0)
    os.makedirs(osp.dirname(output_file_name), exist_ok=True)
    with open(output_file_name, 'wb') as f:
        f.write(output_file.read())
    log.info('CO2MPAS output written into [%s].', output_file_name)
Ví dụ số 8
def save_output(index, training_image, prediction, label):
    """Render and save decision/merge visualization images for one sample,
    overlaying TP/FP/FN masks onto the green/red/blue channels."""
    prediction_label = 1 - prediction[0]
    output_image = copy.copy(training_image)
    # Save prediction
    up_color = color_image(prediction[0], 2)
    scp.misc.imsave('output/decision_%d.png' % index, up_color)
    # Merge true positive with training images' green channel
    true_positive = prediction_label * label[..., 0][0]
    merge_green = (1 - true_positive) * training_image[..., 1] + true_positive * 255
    output_image[..., 1] = merge_green
    # Merge false positive with training images' red channel
    false_positive = prediction_label * label[..., 1][0]
    merge_red = (1 - false_positive) * training_image[..., 0] + false_positive * 255
    output_image[..., 0] = merge_red
    # Merge false negative with training images' blue channel
    false_negative = (1 - prediction_label) * label[..., 0][0]
    merge_blue = (1 - false_negative) * training_image[..., 2] + false_negative * 255
    output_image[..., 2] = merge_blue
    # Save images
    scp.misc.imsave('merge/decision_%d.png' % index, output_image)
def save_output(self):
    """Write scan metadata, sorted results, and logged errors to
    ``self.output_path``; report any failure via the terminal writer."""
    # NOTE(review): the source page repeated this example ten times with
    # trailing junk digits; collapsed to the single corrected definition.
    try:
        # ``with`` guarantees the handle is closed even on a partial write.
        with open(self.output_path, 'w') as output:
            output.write("# url: {host}\n".format(host=self.host.geturl()))
            output.write("# start: {timestamp}\n".format(
                timestamp=self.start_time.strftime("%m-%d-%y_%H:%M:%S.%f")))
            output.write("# wordlist: {wordlist}\n".format(wordlist=self.wordlist))
            output.write("# extensions: {exts}\n".format(exts=self.extensions))
            # One comma-separated line per result, ordered by HTTP status code.
            for x in sorted(self.results, key=lambda x: x.code):
                output.write("{code},{url},{length}\n".format(
                    url=x.url, code=x.code, length=x.length))
            for e in self.error_log:
                output.write("# {msg}\n".format(msg=e))
            output.write("# stop: {timestamp}\n".format(
                timestamp=datetime.now().strftime("%m-%d-%y_%H:%M:%S.%f")))
    except Exception as ex:
        self.terminalw.print_error(
            "Failed creating output file (unknown): {msg}".format(msg=ex))
def save_json_output(json):
    """Write the *json* string to a timestamped ``.json`` file in ``json_folder``.

    Raises
    ------
    IOError
        If the file cannot be opened, written, or its permissions changed.
    """
    # NOTE(review): the source page repeated this example ten times with
    # trailing junk digits; collapsed to the single corrected definition.
    # The parameter name ``json`` shadows the stdlib module; kept unchanged
    # for interface compatibility with existing callers.
    json_file = "%s.json" % datetime.now().strftime('[%d-%m-%Y %H:%M:%S]')
    json_file_path = "%s/%s" % (json_folder, json_file)
    try:
        f = open(json_file_path, "w+")
    except OSError:
        raise IOError("Could not open %s" % json_file_path)
    try:
        with f:
            f.write(json)
    except OSError:
        raise IOError("Could not write in %s" % json_file_path)
    try:
        chmod(json_file_path, 0o666)
    except OSError:
        raise IOError("Could not open %s" % json_file_path)
def scrub_output_pre_save(model, **kwargs):
    """Scrub output before saving notebooks (Jupyter pre-save hook)."""
    # NOTE(review): the source page repeated this example ten times with
    # trailing junk digits; collapsed to the single corrected definition.
    # only run on notebooks
    if model['type'] != 'notebook':
        return
    # only run on nbformat v4
    if model['content']['nbformat'] != 4:
        return
    for cell in model['content']['cells']:
        if cell['cell_type'] != 'code':
            continue
        # Clear outputs and execution counters in place.
        cell['outputs'] = []
        cell['execution_count'] = None

# usage: c.FileContentsManager.pre_save_hook = scrub_output_pre_save
def save_h5_output(h5_filename, seg, segrefine, group, grouppred, label_dtype='uint8'):
    """Save segmentation and group-prediction label arrays to an HDF5 file.

    Datasets written: 'seglabel', 'segrefine', 'pid', 'predpid', each
    gzip-compressed (level 1) with dtype *label_dtype*.
    """
    # NOTE(review): the source page repeated this example ten times with
    # trailing junk digits; collapsed to the single corrected definition.
    print(h5_filename)
    h5_fout = h5py.File(h5_filename)
    # All four datasets share identical storage options.
    for name, data in (('seglabel', seg), ('segrefine', segrefine),
                       ('pid', group), ('predpid', grouppred)):
        h5_fout.create_dataset(name, data=data, compression='gzip',
                               compression_opts=1, dtype=label_dtype)
    h5_fout.close()
def save_task_output(task, output):
    '''
    This function is a light wrapper to save a luigi task's output.
    Instead of writing the output directly onto the output file, we write
    onto a temporary file and then atomically move the temporary file onto
    the output file. This defends against situations where we may have
    accidentally queued multiple instances of a task; if this happens and
    both tasks try to write to the same file, then the file gets corrupted.
    But if both of these tasks simply write to separate files and then each
    perform an atomic move, then the final output file remains uncorrupted.
    Doing this for more or less every single task in GASpy gots annoying,
    so we wrapped it.

    Args:
        task    Instance of a luigi task whose output you want to write to
        output  Whatever object that you want to save
    '''
    # NOTE(review): the source page repeated this example six times with
    # trailing junk digits; collapsed to the single corrected definition.
    # temporary_path() renames the temp file onto the real target on exit.
    with task.output().temporary_path() as task.temp_output_path:
        with open(task.temp_output_path, 'wb') as file_handle:
            pickle.dump(output, file_handle)
def save_task_output[task, output]: ''' This function is a light wrapper to save a luigi task's output. Instead of writing the output directly onto the output file, we write onto a temporary file and then atomically move the temporary file onto the output file. This defends against situations where we may have accidentally queued multiple instances of a task; if this happens and both tasks try to write to the same file, then the file gets corrupted. But if both of these tasks simply write to separate files and then each perform an atomic move, then the final output file remains uncorrupted. Doing this for more or less every single task in GASpy gots annoying, so we wrapped it. Args: task Instance of a luigi task whose output you want to write to output Whatever object that you want to save ''' with task.output[].temporary_path[] as task.temp_output_path: with open[task.temp_output_path, 'wb'] as file_handle: pickle.dump[output, file_handle]6
def save_task_output[task, output]: ''' This function is a light wrapper to save a luigi task's output. Instead of writing the output directly onto the output file, we write onto a temporary file and then atomically move the temporary file onto the output file. This defends against situations where we may have accidentally queued multiple instances of a task; if this happens and both tasks try to write to the same file, then the file gets corrupted. But if both of these tasks simply write to separate files and then each perform an atomic move, then the final output file remains uncorrupted. Doing this for more or less every single task in GASpy gots annoying, so we wrapped it. Args: task Instance of a luigi task whose output you want to write to output Whatever object that you want to save ''' with task.output[].temporary_path[] as task.temp_output_path: with open[task.temp_output_path, 'wb'] as file_handle: pickle.dump[output, file_handle]7
def save_task_output[task, output]: ''' This function is a light wrapper to save a luigi task's output. Instead of writing the output directly onto the output file, we write onto a temporary file and then atomically move the temporary file onto the output file. This defends against situations where we may have accidentally queued multiple instances of a task; if this happens and both tasks try to write to the same file, then the file gets corrupted. But if both of these tasks simply write to separate files and then each perform an atomic move, then the final output file remains uncorrupted. Doing this for more or less every single task in GASpy gots annoying, so we wrapped it. Args: task Instance of a luigi task whose output you want to write to output Whatever object that you want to save ''' with task.output[].temporary_path[] as task.temp_output_path: with open[task.temp_output_path, 'wb'] as file_handle: pickle.dump[output, file_handle]8