text: string (lengths 15 – 7.82k)
ids: sequence (lengths 1 – 7)
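Each row below pairs a "text" value (a Python function whose name has been masked as METHOD_NAME) with an "ids" sequence for that function. As a minimal, hypothetical sketch of how such records could be loaded and inspected — the dataset path below is a placeholder, not the real identifier — one might write:

# Hypothetical loading sketch: the dataset path is a placeholder, and the
# column names ("text", "ids") are taken from the header above.
from datasets import load_dataset

ds = load_dataset("user/method-name-dataset", split="train")  # placeholder path
for row in ds.select(range(3)):
    # Print the length of the masked function text and its id sequence.
    print(len(row["text"]), row["ids"])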
def METHOD_NAME(self, data): self.stack[-1][2].append(data)
[ 238, 5859, 365 ]
def METHOD_NAME(self, inputs, *args, **kwargs):
[ 128 ]
def METHOD_NAME(self, paths=None): """Return true if dependency is present on 'paths'""" return self.get_version(paths) is not None
[ 137, 2541 ]
def METHOD_NAME(): with patch.dict(chef.__opts__, {"test": True}): yield
[ 9, 854 ]
def METHOD_NAME(title): print() print('*' * 30) print(f'{title}...') print('*' * 30) print()
[ 38, 572 ]
def METHOD_NAME(self): return Frozen(_fix_attributes(self.ds.attributes))
[ 19, 1685 ]
def METHOD_NAME(x, *args): return numpy.cos(x) - x
[ -1 ]
def METHOD_NAME(self): self.num_nodes = 1 self.extra_args = [["-checkaddrman=1"]] # Do addrman checks on all operations.
[ 0, 9, 434 ]
def METHOD_NAME(base_net_oos_with_pumps): net = copy.deepcopy(base_net_oos_with_pumps) j11 = pandapipes.create_junction(net, 1.05, 293.15, name="Junction 11", geodata=(14, 0)) j12 = pandapipes.create_junction(net, 1.05, 293.15, name="Junction 12", geodata=(14, -2)) j10 = net.junction.loc[net.junction.name == "Junction 10"].index[0] pandapipes.create_flow_controls(net, [j10, j10], [j11, j12], 0.5, 0.1, in_service=[True, False]) collections = plot.create_simple_collections(net, plot_sinks=True, plot_sources=True) assert len(collections) == len([comp for comp in net["component_list"] if not net[comp.table_name()].empty]) assert len(collections["junction"].get_paths()) == len(net.junction[net.junction.in_service]) assert len(collections["pipe"].get_paths()) == len(net.pipe[net.pipe.in_service]) assert len(collections["ext_grid"].get_paths()) == len(net.ext_grid[net.ext_grid.in_service]) assert len(collections["source"]) == 2 assert isinstance(collections["source"][0], PatchCollection) assert isinstance(collections["source"][1], LineCollection) assert len(collections["source"][0].get_paths()) == len(net.source[net.source.in_service]) assert len(collections["source"][1].get_paths()) == 3 * len(net.source[net.source.in_service]) assert len(collections["sink"]) == 2 assert isinstance(collections["sink"][0], PatchCollection) assert isinstance(collections["sink"][1], LineCollection) assert len(collections["sink"][0].get_paths()) == len(net.sink[net.sink.in_service]) assert len(collections["sink"][1].get_paths()) == len(net.sink[net.sink.in_service]) assert len(collections["valve"]) == 2 assert isinstance(collections["valve"][0], PatchCollection) assert isinstance(collections["valve"][1], LineCollection) assert len(collections["valve"][0].get_paths()) == 2 * len(net.valve) assert len(collections["valve"][1].get_paths()) == 2 * len(net.valve) assert len(collections["heat_exchanger"]) == 2 assert isinstance(collections["heat_exchanger"][0], PatchCollection) assert isinstance(collections["heat_exchanger"][1], LineCollection) assert len(collections["heat_exchanger"][0].get_paths()) == 2 * len(net.heat_exchanger[ net.heat_exchanger.in_service]) assert len(collections["heat_exchanger"][1].get_paths()) == 2 * len(net.heat_exchanger[ net.heat_exchanger.in_service]) assert len(collections["pump"]) == 2 assert isinstance(collections["pump"][0], PatchCollection) assert isinstance(collections["pump"][1], LineCollection) assert len(collections["pump"][0].get_paths()) == len(net.pump[net.pump.in_service]) assert len(collections["pump"][1].get_paths()) == 4 * len(net.pump[net.pump.in_service]) assert len(collections["circ_pump_pressure"]) == 2 assert isinstance(collections["circ_pump_pressure"][0], PatchCollection) assert isinstance(collections["circ_pump_pressure"][1], LineCollection) assert len(collections["circ_pump_pressure"][0].get_paths()) == len(net.circ_pump_pressure[ net.circ_pump_pressure.in_service]) assert len(collections["circ_pump_pressure"][1].get_paths()) == 4 * len(net.circ_pump_pressure[ net.circ_pump_pressure.in_service]) assert len(collections["circ_pump_mass"]) == 2 assert isinstance(collections["circ_pump_mass"][0], PatchCollection) assert isinstance(collections["circ_pump_mass"][1], LineCollection) assert len(collections["circ_pump_mass"][0].get_paths()) == \ len(net.circ_pump_mass[net.circ_pump_mass.in_service]) assert len(collections["circ_pump_mass"][1].get_paths()) == \ 4 * len(net.circ_pump_mass[net.circ_pump_mass.in_service]) assert len(collections["flow_control"]) == 2 assert isinstance(collections["flow_control"][0], PatchCollection) assert isinstance(collections["flow_control"][1], LineCollection) assert len(collections["flow_control"][0].get_paths()) == \ 3 * len(net.flow_control[net.flow_control.in_service]) assert len(collections["flow_control"][1].get_paths()) == \ 2 * len(net.flow_control[net.flow_control.in_service])
[ 9, 53, 5321, 1737, 47, 549 ]
def METHOD_NAME(path: path_type): # Extract OSS configuration from the encoded URL. str_path = stringify_path(path) parse_result = oss2.urlparse(str_path) if parse_result.scheme != "oss": raise ValueError( f"Except scheme oss, but got scheme: {parse_result.scheme}" f" in path: {str_path}" ) bucket = parse_result.hostname if not (parse_result.username and parse_result.password): raise RuntimeError(r"Please use build_oss_path to add OSS info") param_dict = url_to_dict(parse_result.username) access_key_id = param_dict["access_key_id"] access_key_secret = parse_result.password end_point = param_dict["end_point"] key = parse_result.path key = key[1:] if key.startswith("/") else key return bucket, key, access_key_id, access_key_secret, end_point
[ 214, -1 ]
def METHOD_NAME(self): """ Test basic modules start and stop """ self.render_config_template( reload=True, reload_path=self.working_dir + "/configs/*.yml", reload_type="modules", inputs=False, ) proc = self.start_beat() os.mkdir(self.working_dir + "/logs/") logfile = self.working_dir + "/logs/test.log" os.mkdir(self.working_dir + "/configs/") with open(self.working_dir + "/configs/system.yml.test", 'w') as f: f.write(moduleConfigTemplate.format(self.working_dir + "/logs/*")) os.rename(self.working_dir + "/configs/system.yml.test", self.working_dir + "/configs/system.yml") with open(logfile, 'w') as f: f.write("Hello world\n") self.wait_until(lambda: self.output_lines() == 1, max_timeout=10) print(self.output_lines()) # Remove input with open(self.working_dir + "/configs/system.yml", 'w') as f: f.write("") # Wait until input is stopped self.wait_until( lambda: self.log_contains("Stopping runner:"), max_timeout=15) with open(logfile, 'a') as f: f.write("Hello world\n") # Wait to give a change to pick up the new line (it shouldn't) time.sleep(1) self.wait_until(lambda: self.output_lines() == 1, max_timeout=5) proc.check_kill_and_wait()
[ 9, 447, 631 ]
def METHOD_NAME(self, on): pass # No joystick used
[ 3987, 69, 3988 ]
def METHOD_NAME(applications_stream): response = create_response({"link": f'<https://harvest.greenhouse.io/v1/applications?per_page={100}&since_id=123456789>; rel="next"'}) next_page_token = applications_stream.retriever._next_page_token(response=response) request_params = applications_stream.retriever._request_params(next_page_token=next_page_token, stream_state={}) path = applications_stream.retriever._paginator_path() assert "applications?per_page=100&since_id=123456789" == path assert request_params == {"per_page": 100}
[ 9, 377, 434, 243, 1174, 466, 137 ]
def METHOD_NAME(self): p = Plot(["a", "b", "c"], [1, 0, 2]).add(Bar()).plot() ax = p._figure.axes[0] assert len(ax.patches) == 2
[ 9, 313, 1877, 79 ]
def METHOD_NAME(): pins = pinutils.scan_pin_file([], 'stm32f401.csv', 5, 8, 9) pins = pinutils.scan_pin_af_file(pins, 'stm32f401_af.csv', 0, 1) return pinutils.only_from_package(pinutils.fill_gaps_in_pin_list(pins), chip["package"])
[ 19, 3783 ]
def METHOD_NAME(self): addr = account.Address('UŠer', 'example.com', case_sensitive=True) self.assertNotEqual(addr, 'ušer@example.com')
[ 9, 2338, 774, 331, 3719 ]
def METHOD_NAME(x): return Xdot(x) - mdot(x)
[ 6405 ]
def METHOD_NAME(self, notebook): imageList = wx.ImageList(24, 24) image = wx.Image(PPRZ_HOME + "/data/pictures/gray_led24.png") bitmap = wx.BitmapFromImage(image) imageList.Add(bitmap) image = wx.Image(PPRZ_HOME + "/data/pictures/green_led24.png") bitmap = wx.BitmapFromImage(image) imageList.Add(bitmap) notebook.AssignImageList(imageList)
[ 102, 660, 245 ]
def METHOD_NAME(payload: WildValue) -> str: return RADARR_MESSAGE_TEMPLATE_MOVIE_IMPORTED.format( movie_title=payload["movie"]["title"].tame(check_string) )
[ 19, 2829, 43, 1786, 190, 417 ]
def METHOD_NAME(self, meta, form_id, status): self.stdout.write( f'{meta.domain},{meta.case_id},{meta.referenced_id},{meta.index_identifier},{form_id},{status}' )
[ 77, 146 ]
def METHOD_NAME(h, f): if h.startswith(b'\0\0'): rate = get_short_le(h[2:4]) if 4000 <= rate <= 25000: return 'sndr', rate, 1, -1, 8
[ 9, 17271 ]
def METHOD_NAME(box_dict): return np.array([ [ box_dict['Lx'], box_dict['Ly'] * box_dict['xy'], box_dict['Lz'] * box_dict['xz'] ], [0, box_dict['Ly'], box_dict['Lz'] * box_dict['yz']], [0, 0, box_dict['Lz']], ])
[ 391, 430 ]
def METHOD_NAME( cls, type: Literal["atlas", "modes"], name: str, image_path: Path | str, labels_path: Path | str, ) -> Self: image_path = Path(image_path) labels_frame = read_spreadsheet(labels_path) labels: dict[int, str] = dict() for label_tuple in labels_frame.itertuples(index=False): # First columnn is the index, second is the name. labels[int(label_tuple[0])] = format_like_bids(str(label_tuple[1])) image = nib.loadsave.load(image_path) return cls(type, name, image, labels)
[ 280, 335 ]
def METHOD_NAME(self): """This write a function of object Chip""" if "external" in self: return "" code = self.symtab.codeFormatter() # Generate function header void_type = self.symtab.find("void", Type) return_type = self.return_type.c_ident if "return_by_ref" in self and self.return_type != void_type: return_type += "&" if "return_by_pointer" in self and self.return_type != void_type: return_type += "*" params = ", ".join(self.param_strings) code( """
[ 567, 544 ]
def METHOD_NAME(api_name: str): if not api_name.isidentifier(): raise InvalidArgument( "Invalid API name: '{}', a valid identifier may only contain letters," " numbers, underscores and not starting with a number.".format(api_name) ) if api_name in RESERVED_API_NAMES: raise InvalidArgument( "Reserved API name: '{}' is reserved for infra endpoints".format( api_name ) )
[ 187, 156 ]
def METHOD_NAME(self): """Close training areas map - be quiet""" verbosity = G_verbose() G_set_verbose(0) DisplayDriver.METHOD_NAME(self) G_set_verbose(verbosity)
[ 1462, 422 ]
def METHOD_NAME(self):
[ 9, 5182, 293 ]
def METHOD_NAME(self, model_id, locally, output): if locally: is_loaded, model_path = ModelDeploy(self.ctx, self.project).verify_local_model(model_id) if not is_loaded: raise AugerException('Model should be deployed locally.') return ModelReview({'model_path': model_path}).METHOD_NAME( data_path=self.ctx.config.get("source"), output=output) else: raise Exception("Not Implemented.")
[ 56, 4381, 365 ]
def METHOD_NAME(seq, refmodels): refmodels.Reference.objects.all().delete() project1 = factories.ProjectFactory.create() seqname1 = refmodels.make_sequence_name(project1) project2 = factories.ProjectFactory.create() seqname2 = refmodels.make_sequence_name(project2) seq.alter(seqname1, 100) seq.alter(seqname2, 200) issue = factories.IssueFactory.create(project=project1) assert issue.ref == 101 issue.subject = "other" issue.save() assert issue.ref == 101 issue.project = project2 issue.save() assert issue.ref == 201
[ 9, 4920, 946, 272, 69, 155, 194 ]
def METHOD_NAME(self): """Prints the daemon status.""" if self.is_running(): self.echo("Daemon is running") else: self.echo("Daemon is not running")
[ 452 ]
def METHOD_NAME(test_case, shape, dtype, device): np_input = np.random.randint(3, size=shape) np_other = np.random.randint(3, size=shape) input = flow.tensor(np_input, dtype=dtype, device=flow.device(device)) other = flow.tensor(np_other, dtype=dtype, device=flow.device(device)) of_out = flow.logical_and(input, other) np_out = np.logical_and(np_input, np_other) test_case.assertTrue(np.array_equal(of_out.numpy(), np_out)) x = torch.ones(3).byte() y = torch.ones(3).byte() z = (x & ~y).bool() test_case.assertTrue(np.array_equal(z.numpy(), [False, False, False]))
[ 9, 1692, 61 ]
def METHOD_NAME(self): return jsonify_data(settings_box=self.render_settings_box(), right_header=render_event_management_header_right(self.event))
[ 12923, 1434 ]
def METHOD_NAME(url) -> str: filename = list(filter(lambda x: x!='', url.split('/')))[-1] download_path = f"{TEMPDIR}/{filename}" with open(download_path, 'wb') as f: f.write(requests.get(url).content) return download_path
[ 136 ]
def METHOD_NAME(self, input: Tensor, target: Tensor) -> Tensor: fake_img, latents = self.generator(input, return_latents=True) path_loss, self.mean_path_length, path_lengths = self.g_path_regularize( fake_img, latents, self.mean_path_length) return path_loss
[ 849, 76, 4197, 157 ]
def METHOD_NAME(dmId): nameFilePath = os.path.join(getSysfsPath(dmId), "dm", "name") with open(nameFilePath, "r") as f: return f.readline().rstrip("\n")
[ 19, 828, 156 ]
def METHOD_NAME(buffer_connection): from moler.cmd.unix import iperf buffer_connection.remote_inject_response([iperf.COMMAND_OUTPUT_basic_server]) iperf_cmd = iperf.Iperf(connection=buffer_connection.moler_connection, **iperf.COMMAND_KWARGS_basic_server) assert iperf_cmd() == iperf.COMMAND_RESULT_basic_server
[ 9, 12383, 3705, 5378, 756, 906, 163 ]
def METHOD_NAME(path, mode): parts = str(path).split(".") assert len(parts) > 1, "Extension needed to figure out serialization format" if len(parts) == 2: db_format = parts[-1] compression = None else: db_format = parts[-2] compression = parts[-1] assert compression is None or compression in COMPRESSION_FORMATS assert db_format in SERIALIZATION_FORMATS store_constructor = SERIALIZATION_FORMATS[db_format] if compression == "gz": with gzip.GzipFile(path, mode) as f: yield store_constructor(f) elif compression == "zstd": if "w" in mode or "a" in mode: cctx = zstandard.ZstdCompressor() with open(path, mode) as f: with cctx.stream_writer(f) as writer: yield store_constructor(writer) else: dctx = zstandard.ZstdDecompressor() with open(path, mode) as f: with dctx.stream_reader(f) as reader: yield store_constructor(reader) else: with open(path, mode) as f: yield store_constructor(f)
[ 1267, 1452 ]
def METHOD_NAME(d): file_list = [] for dirname, dirs, names in os.walk(d): file_list.append((dirname, filter(lambda x, d=dirname: is_file_or_link(d, x), names))) return file_list
[ 245, 1537, 2203 ]
def METHOD_NAME(text): return text_attribute(text, 'cyan')
[ 14407 ]
def METHOD_NAME(self) -> Path: """Path to PUDL output directory.""" return Path(self.pudl_output)
[ 146, 1190 ]
def METHOD_NAME(self): tcl = self.interp filename = "testEvalFile.tcl" fd = open(filename,'w') script = """set a 1 set b 2 set c [ expr $a + $b ] """ fd.write(script) fd.close() tcl.evalfile(filename) os.remove(filename) self.assertEqual(tcl.eval('set a'),'1') self.assertEqual(tcl.eval('set b'),'2') self.assertEqual(tcl.eval('set c'),'3')
[ 9, 1171, 171 ]
def METHOD_NAME(args, model, device, train_loader, optimizer, epoch): model.METHOD_NAME() for batch_idx, (data, target) in enumerate(train_loader): data, target = data.to(device), target.to(device) optimizer.zero_grad() output = model(data) loss = F.nll_loss(output, target) loss.backward() optimizer.step() if batch_idx % args.log_interval == 0: print( "Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}".format( epoch, batch_idx * len(data), len(train_loader.dataset), 100.0 * batch_idx / len(train_loader), loss.item(), ) ) if args.dry_run: break
[ 849 ]
def METHOD_NAME(self): return self.request('/store-api/country', name='countries')
[ 3235 ]
def METHOD_NAME( filepath, fileposition ): with open( filepath, encoding = 'utf8' ) as f: if fileposition: f.seek( fileposition ) return f.read(), f.tell()
[ 203, 171 ]
def METHOD_NAME(self, js_name): with open(os.path.join(TEST_DIR, js_name)) as f: js = f.read() # WebKit.run_javascript() needs some serializable return value js += '\ntrue\n' result = None def on_done(_group, name, value): # cockpit-client resets the value to '' initially, to guarantee that a Changed signal happens # even when two consecutive run-js calls have the same result v = value.get_string() if v == '': return nonlocal result result = v def on_timeout(): nonlocal result result = "timed out waiting for JavaScript result" handler = self.win_actions.connect('action-state-changed::run-js', on_done) self.win_actions.activate_action("run-js", GLib.Variant.new_string(self.testlib + js)) main = GLib.MainContext.default() GLib.timeout_add_seconds(JS_TIMEOUT, on_timeout) while not result: main.iteration(may_block=True) self.win_actions.disconnect(handler) return result
[ 22, 3382 ]
def METHOD_NAME(self): ''' Stops (Unregisters) the client. ''' self.sub.unregister()
[ 631 ]
async def METHOD_NAME(pipeline_response): deserialized = self._deserialize("OperationsList", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem)
[ 297, 365 ]
def METHOD_NAME(self): data_util = datetime(2023, 4, 17) self.assertTrue(self.calendar.data_eh_dia_util_bancario(data_util)) data_nao_util = datetime(2023, 4, 15) self.assertFalse(self.calendar.data_eh_dia_util_bancario(data_nao_util)) data_nao_util = datetime(2023, 4, 16) self.assertFalse(self.calendar.data_eh_dia_util_bancario(data_nao_util)) data_feriado = datetime(2023, 4, 21) self.assertTrue(self.calendar.data_eh_dia_util_bancario(data_feriado))
[ 9, 365, 10106, 14356, 1960, 14357 ]
def METHOD_NAME(self, node): return (node.y - self.min_y) * self.x_width + (node.x - self.min_x)
[ 1407, 724 ]
def METHOD_NAME(self, library: "ElementLibrary"): library_name = library.get_name() if self.__module is None: _TaipyLogger._get_logger().info( f"Python API for extension library '{library_name}' will not be available. To fix this, import 'taipy.gui.builder' before importing the extension library." ) return library_module = getattr(self.__module, library_name, None) if library_module is None: library_module = types.ModuleType(library_name) setattr(self.__module, library_name, library_module) for element_name in library.get_elements().keys(): setattr( library_module, element_name, _ElementApiGenerator().createControlElement(element_name, f"{library_name}.{element_name}"), )
[ 238, 3106 ]
def METHOD_NAME(filters, kernel_size, name=None): return Sequential([ ReLU(), SeparableConv2D(filters, kernel_size, padding='same'), BatchNormalization(trainable=True), ], name)
[ 56, 2479, 1306 ]
def METHOD_NAME(self, point, index=np.s_[:], p=None, scaled=True): smlphi = self.smspace.METHOD_NAME(point, index=index, p=p) ldof = self.number_of_local_dofs(p=p, doftype='cell') shape = point.shape[:-1] + (ldof, 2) lphi = np.zeros(shape, dtype=self.ftype) lphi[..., :ldof//2, 0] = smlphi lphi[..., -ldof//2:, 1] = smlphi return lphi
[ 13322, 1189 ]
def METHOD_NAME(self): error = APIException({ 'non_field_errors': ['Error message.'] }) response = exception_handler(error, None) assert response.data == { 'code': 'error', 'message': 'Error message.', }
[ 9, 437, 168, 101, 97 ]
def METHOD_NAME(working_dir, bundler_dir, thresh, new_bundles): if path_exists(join(working_dir, '.git')): last_bundle = None last_bundlename = newest_bundle_filename(bundler_dir) # "git gc" before bundling is *very* important, since # otherwise the bundles mushroom for no good reason. os.chdir(working_dir) cmd(['git', 'commit', '-a', '-m', 'Salvus save'], ignore_errors=True) cmd(['git', 'gc']) # Try making a new bundle first cmd([diffbundler, 'create', working_dir, bundler_dir], dry_run=False) new_bundlename = newest_bundle_filename(bundler_dir) if new_bundlename != last_bundlename: # There were changes to the repo that we had not already bundled up. # First, check if we should merge the last two bundles. if thresh > 0 and last_bundlename is not None and getsize(last_bundlename)/1000000.0 < thresh: os.unlink(last_bundlename) os.unlink(new_bundlename) cmd([diffbundler, 'create', working_dir, bundler_dir], dry_run=False) new_bundles.append(last_bundlename) else: new_bundles.append(new_bundlename) for path in listdir(working_dir): p = join(working_dir, path) if isdir(p) and not islink(p): METHOD_NAME(p, join(bundler_dir, path), thresh, new_bundles)
[ 129, -1 ]
def METHOD_NAME(): import os import subprocess # Get the repository directory repo_dir = os.path.abspath(os.path.normpath(os.path.join(os.path.dirname(__file__), "..", ".."))) # Attempt to get the configured username from the local Git try: result = subprocess.run(["git", "config", "user.username"], stdout=subprocess.PIPE, cwd=repo_dir) result.check_returncode() # Check if the command was executed successfully username = result.stdout.decode().rstrip() return username except subprocess.CalledProcessError as ex: # Handle errors if the git config command fails print(f"Error fetching Git username: {ex}") return None
[ 1493, 2072, 2991 ]
def METHOD_NAME(): lx = guess_lexer(get_input('easytrieve', 'example.ezt')) assert lx.__class__.__name__ == 'EasytrieveLexer' lx = guess_lexer(get_input('easytrieve', 'example.mac')) assert lx.__class__.__name__ == 'EasytrieveLexer'
[ 9, 1363, 5886, -1 ]
def METHOD_NAME(data): if not isinstance(data, bytes) and hasattr(data, 'encode'): data = data.encode('utf-8') # Don't bail out with an exception if data is None return data if data is not None else b''
[ 4897, 24, 321 ]
def METHOD_NAME(): import torch.nn as nn class Net(ModelSpace): def __init__(self): super().__init__() self.repeat = Repeat( lambda index: LayerChoice([nn.Identity(), nn.Identity()], label=f'layer{index}'), (3, 5), label='rep') def forward(self, x): return self.module(x) net = Net() assert net.contains({'rep': 3, 'layer0': 0, 'layer1': 0, 'layer2': 0}) assert not net.contains({'rep': 4, 'layer0': 0, 'layer1': 0, 'layer2': 0}) assert net.contains({'rep': 3, 'layer0': 0, 'layer1': 0, 'layer2': 0, 'layer3': 0})
[ 9, 5293, 1992 ]
def METHOD_NAME(backend): return getattr(getattr(backend, "AUTH_BACKEND", backend), "name", None)
[ 19, 3127, 156 ]
def METHOD_NAME(module, min_num_params: Optional[int] = None, **kwargs): """ Helper to wrap layers/modules in FSDP. This falls back to a no-op if fairscale is not available. Args: module (nn.Module): module to (maybe) wrap min_num_params (int, Optional): minimum number of layer params to wrap """ try: from fairscale.nn import wrap if min_num_params is not None: num_params = sum(p.numel() for p in module.parameters()) if num_params >= min_num_params: return wrap(module, **kwargs) else: return module else: return wrap(module, **kwargs) except ImportError: return module
[ 8250, 503 ]
def METHOD_NAME(self): """The PUSH socket for use in the zmq message destructor callback.""" if getattr(self, "_stay_down", False): raise RuntimeError("zmq gc socket requested during shutdown") if not self.is_alive() or self._push is None: self._push = self.context.socket(zmq.PUSH) self._push.connect(self.url) return self._push
[ 1013, 1083 ]
def METHOD_NAME(self):
[ 129, 126 ]
def METHOD_NAME( workspace_process_context: WorkspaceProcessContext, query: str, variables: Optional[Mapping[str, object]] = None, ): check.inst_param( workspace_process_context, "workspace_process_context", WorkspaceProcessContext ) check.str_param(query, "query") check.opt_mapping_param(variables, "variables") query = query.strip("'\" \n\t") context = workspace_process_context.create_request_context() result = create_schema().execute( query, context_value=context, variable_values=variables, ) result_dict = result.formatted # Here we detect if this is in fact an error response # If so, we iterate over the result_dict and the original result # which contains a GraphQLError. If that GraphQL error contains # an original_error property (which is the exception the resolver # has thrown, typically) we serialize the stack trace of that exception # in the 'stack_trace' property of each error to ease debugging if "errors" in result_dict: result_dict_errors = check.list_elem(result_dict, "errors", of_type=Exception) result_errors = check.is_list(result.errors, of_type=Exception) check.invariant(len(result_dict_errors) == len(result_errors)) # for python_error, error_dict in zip(result_errors, result_dict_errors): if hasattr(python_error, "original_error") and python_error.original_error: error_dict["stack_trace"] = get_stack_trace_array(python_error.original_error) return result_dict
[ 750, 539 ]
def METHOD_NAME(self): """Install procedure for Perl modules: using either Makefile.Pl or Build.PL.""" prefix_opt = self.cfg.get('prefix_opt') # Perl modules have two possible installation procedures: using Makefile.PL and Build.PL # configure, build, test, install if os.path.exists('Makefile.PL'): if prefix_opt is None: prefix_opt = 'PREFIX' install_cmd = ' '.join([ self.cfg['preconfigopts'], 'perl', 'Makefile.PL', '%s=%s' % (prefix_opt, self.installdir), self.cfg['configopts'], ]) run_cmd(install_cmd) ConfigureMake.build_step(self) ConfigureMake.test_step(self) ConfigureMake.install_step(self) elif os.path.exists('Build.PL'): if prefix_opt is None: prefix_opt = '--prefix' install_cmd = ' '.join([ self.cfg['preconfigopts'], 'perl', 'Build.PL', prefix_opt, self.installdir, self.cfg['configopts'], ]) run_cmd(install_cmd) run_cmd("%s perl Build build %s" % (self.cfg['prebuildopts'], self.cfg['buildopts'])) if self.cfg['runtest']: run_cmd('perl Build %s' % self.cfg['runtest']) run_cmd('%s perl Build install %s' % (self.cfg['preinstallopts'], self.cfg['installopts']))
[ 428, 4621, 298 ]
def METHOD_NAME(): client, cluster, aws_nodes = create_and_validate_import_cluster() cluster_cleanup(client, cluster, aws_nodes)
[ 9, 512, 15961, 2059 ]
def METHOD_NAME(fn): listen(target, identifier, fn, *args, **kw) return fn
[ 3949 ]
def METHOD_NAME(self): self.copy("LICENSE_1_0.txt", dst="licenses", src=self._source_subfolder) if self.options.c_api: cmake = self._configure_cmake() cmake.install() tools.rmdir(os.path.join(self.package_folder, "lib", "pkgconfig")) tools.rmdir(os.path.join(self.package_folder, "lib", "cmake")) else: self.copy("*.h", dst="include", src=os.path.join(self._source_subfolder, "include")) self.copy("*.hpp", dst="include", src=os.path.join(self._source_subfolder, "include"))
[ 360 ]
def METHOD_NAME(LMS: ArrayLike) -> NDArrayFloat: """ Callable applying the forward non-linearity to the :math:`LMS` colourspace array. """ with domain_range_scale("ignore"): return eotf_inverse_ST2084(LMS)
[ 4858, 1305, 24, 4858, 1305, 2054, 1448 ]
def METHOD_NAME(self):
[ 9, 5675 ]
def METHOD_NAME(self, obj, attrs, user, *args, **kwargs): raw_data = {option.key: option.value for option in attrs} data = {} for key, uo in USER_OPTION_SETTINGS.items(): val = raw_data.get(uo["key"], uo["default"]) if uo["type"] == bool: data[key.value] = bool(int(val)) # '1' is true, '0' is false elif uo["type"] == int: data[key.value] = int(val) data["weeklyReports"] = True # This cannot be overridden return data
[ 183 ]
def METHOD_NAME(org_user): """Everybody is considered to have viewer perms.""" return org_user.role_level >= ROLE_VIEWER
[ 139, 1574 ]
def METHOD_NAME(self): export_conandata_patches(self)
[ 294, 505 ]
def METHOD_NAME(self): constraint = gpytorch.constraints.GreaterThan([1.0, 2.0]) v = torch.tensor([-3.0, -2.0]) value = constraint.transform(v) actual_value = v.clone() actual_value[0] = softplus(v[0]) + 1.0 actual_value[1] = softplus(v[1]) + 2.0 self.assertAllClose(value, actual_value)
[ 9, 1053, 768, 3657, 489 ]
def METHOD_NAME(data): """For a 44-year-old, the api should always return an age, a full retirement age and a value for benefits at age 70 """ if ( data["current_age"] == 44 and data["data"]["full retirement age"] == "67" and data["data"]["benefits"]["age 70"] ): return "OK" else: return "BAD DATA"
[ 250, 365 ]
def METHOD_NAME(parent, dest): dest["author_detail"] = detail = FeedParserDict() if "name" in parent: dest["author"] = detail["name"] = parent["name"] if "url" in parent: if parent["url"].startswith("mailto:"): detail["email"] = parent["url"][7:] else: detail["href"] = parent["url"]
[ 214, 2997 ]
def METHOD_NAME(list1, list2): """ Examples: >>> issubset([], [65, 66, 67]) True >>> issubset([65], [65, 66, 67]) True >>> issubset([65, 66], [65, 66, 67]) True >>> issubset([65, 67], [65, 66, 67]) False """ n = len(list1) for startpos in range(len(list2) - n + 1): if list2[startpos:startpos+n] == list1: return True return False
[ 7504 ]
async def METHOD_NAME() -> None: @get("/") def handler() -> None: return None config = RateLimitConfig(rate_limit=("second", 1)) cache_key = "RateLimitMiddleware::testclient" app = Litestar(route_handlers=[handler], middleware=[config.middleware]) store = app.stores.get("rate_limit") with TestClient(app=app) as client: response = client.get("/") assert response.status_code == HTTP_200_OK cached_value = await store.get(cache_key) assert cached_value cache_object = CacheObject(**decode_json(value=cached_value)) assert cache_object.reset == int(time() + 1) cache_object.reset -= 2 await store.set(cache_key, encode_json(cache_object)) response = client.get("/") assert response.status_code == HTTP_200_OK
[ 9, 656 ]
def METHOD_NAME(): o = Organizer.objects.create(name='Dummy', slug='dummy') with scope(organizer=o): event = Event.objects.create( organizer=o, name='Dummy', slug='dummy', date_from=now(), live=True ) o1 = Order.objects.create( code='FOOBAR', event=event, email='[email protected]', status=Order.STATUS_PENDING, datetime=now(), expires=now() + timedelta(days=10), total=Decimal('13.37'), ) shirt = Item.objects.create(event=event, name='T-Shirt', default_price=12) shirt_red = ItemVariation.objects.create(item=shirt, default_price=14, value="Red") OrderPosition.objects.create( order=o1, item=shirt, variation=shirt_red, price=12, attendee_name_parts={}, secret='1234' ) OrderPosition.objects.create( order=o1, item=shirt, variation=shirt_red, price=12, attendee_name_parts={}, secret='5678' ) yield event, o1, shirt
[ 485 ]
def METHOD_NAME(self): expected = [ ("can't assign to keyword", "None", 80, self.FatalError), ] actual = self.compile_file("None = 42") self.assertEqual(expected, actual)
[ 9, 776, 24, 98 ]
def METHOD_NAME(self, request): args = request.args chdict = { "revision": args.get(b'revision'), "repository": args.get(b'_repository') or '', "project": args.get(b'project') or '', "codebase": args.get(b'codebase') } return ([chdict], None)
[ 19, 1103 ]
def METHOD_NAME(region): """ Return the region argument. """ return " --region {r}".format(r=region)
[ 1216 ]
async def METHOD_NAME( src: StrOrBytesPath, dst: StrOrBytesPath, *, src_dir_fd: int | None = None, dst_dir_fd: int | None = None, loop: AbstractEventLoop | None = ..., executor: Any = ..., ) -> None: ...
[ 369 ]
def METHOD_NAME(project_ids, state, credentials): """Returns the summary of recommendations on all the given projects. Args: project_ids: List(str) project to which recommendation is needed. state: state of recommendations credentials: client credentials. """ recommender = build("recommender", "v1", credentials=credentials, cache_discovery=False) def get_metric(project_id): recommendation_metric = common.get_recommendations( project_id, recommender=recommender, state=state, credentials=credentials) return accounts_can_made_safe(project_id, state, recommendation_metric) recommendation_stats = common.rate_limit_execution(get_metric, RATE_LIMIT, project_ids) recommendation_stats_sorted = sorted( recommendation_stats, key=lambda metric: -sum(metric["stats"].values())) return recommendation_stats_sorted
[ 19, 8912, 2718, 47, 2847 ]
def METHOD_NAME(self, application_paused): self.application_paused = application_paused
[ 88, 7830, 1076 ]
def METHOD_NAME(queue: str, chunk_info): """Manually queue chunk when a job is stuck for whatever reason.""" from .cluster import _create_atomic_chunk from .cluster import create_parent_chunk from .utils import chunk_id_str redis = get_redis_connection() imanager = IngestionManager.from_pickle(redis.get(r_keys.INGESTION_MANAGER)) layer = chunk_info[0] coords = chunk_info[1:] queue = imanager.get_task_queue(queue) if layer == 2: func = _create_atomic_chunk args = (coords,) else: func = create_parent_chunk args = (layer, coords) queue.enqueue( func, job_id=chunk_id_str(layer, coords), job_timeout=f"{int(layer * layer)}m", result_ttl=0, args=args, )
[ 453, 464 ]
def METHOD_NAME(value_str): try: value = int(value_str) if value > 0: return value except ValueError: pass raise argparse.ArgumentTypeError('must be a positive integer (got {!r})'.format(value_str))
[ 2302, 962, 718 ]
def METHOD_NAME(self, idx: int, key: str) -> Optional[str]: return self._request_processor[idx].get_request_property(key)
[ 19, 377, 572 ]
def METHOD_NAME(self, inInputFilter):
[ 129, 8904, 1179, 527 ]
def METHOD_NAME(self, url):
[ 1452 ]
def METHOD_NAME(self) -> dict | None: """ Return structured config for patch_panel """ if not self.shared_utils.network_services_l1: return None patches = [] for tenant in self._filtered_tenants: if "point_to_point_services" not in tenant: continue for point_to_point_service in natural_sort(tenant["point_to_point_services"], "name"): if subifs := point_to_point_service.get("subinterfaces", []): subifs = [subif for subif in subifs if subif.get("number") is not None] for endpoint in point_to_point_service.get("endpoints", []): if self.shared_utils.hostname not in endpoint.get("nodes", []): continue node_index = list(endpoint["nodes"]).index(self.shared_utils.hostname) interface = endpoint["interfaces"][node_index] if get(endpoint, "port_channel.mode") in ["active", "on"]: channel_group_id = "".join(re.findall(r"\d", interface)) interface = f"Port-Channel{channel_group_id}" if subifs: for subif in subifs: patch = { "name": f"{point_to_point_service['name']}_{subif['number']}", "enabled": True, "connectors": [ { "id": 1, "type": "interface", "endpoint": f"{interface}.{subif['number']}", }, ], } if point_to_point_service.get("type") == "vpws-pseudowire": patch["connectors"].append( { "id": 2, "type": "pseudowire", "endpoint": f"bgp vpws {tenant['name']} pseudowire {point_to_point_service['name']}_{subif['number']}", } ) append_if_not_duplicate( list_of_dicts=patches, primary_key="name", new_dict=patch, context="Patches defined under point_to_point_services", context_keys=["name"], ) else: patch = { "name": f"{point_to_point_service['name']}", "enabled": True, "connectors": [ { "id": 1, "type": "interface", "endpoint": f"{interface}", } ], } if point_to_point_service.get("type") == "vpws-pseudowire": patch["connectors"].append( { "id": 2, "type": "pseudowire", "endpoint": f"bgp vpws {tenant['name']} pseudowire {point_to_point_service['name']}", } ) append_if_not_duplicate( list_of_dicts=patches, primary_key="name", new_dict=patch, context="Patches defined under point_to_point_services", context_keys=["name"], ) if patches: return {"patches": patches} return None
[ 1575, 519 ]
def METHOD_NAME() -> None: default_processes = max(1, cpu_count()) parser = argparse.ArgumentParser(description=__doc__, parents=[DB().argparser]) if db.nmap is None: fltnmap = None else: fltnmap = db.nmap.flt_empty if db.passive is None: fltpass = None else: fltpass = db.passive.flt_empty _from: List[Generator[Record, None, None]] = [] parser.add_argument( "--view-category", metavar="CATEGORY", help="Choose a different category than the default", ) parser.add_argument( "--test", "-t", action="store_true", help="Give results in standard output instead of " "inserting them in database.", ) parser.add_argument( "--verbose", "-v", action="store_true", help="For test output, print out formatted results.", ) parser.add_argument( "--no-merge", action="store_true", help="Do **not** merge with existing results for same host and source.", ) parser.add_argument( "--to-db", metavar="DB_URL", help="Store data to the provided URL instead of the default DB for view.", ) parser.add_argument( "--processes", metavar="COUNT", type=int, help=f"The number of processes to use to build the records. Default on this system is {default_processes}.", default=default_processes, ) subparsers = parser.add_subparsers( dest="view_source", help=("Accepted values are 'nmap' and 'passive'. None or 'all' will do both"), ) if db.nmap is not None: subparsers.add_parser("nmap", parents=[db.nmap.argparser]) if db.passive is not None: subparsers.add_parser("passive", parents=[db.passive.argparser]) subparsers.add_parser("all") args = parser.parse_args() view_category = args.view_category if not args.view_source: args.view_source = "all" if args.view_source == "all": _from = [] if db.nmap is not None: fltnmap = db.nmap.parse_args(args, flt=fltnmap) _from.append(nmap_to_view(fltnmap, category=view_category)) if db.passive is not None: fltpass = db.passive.parse_args(args, flt=fltpass) _from.append(passive_to_view(fltpass, category=view_category)) elif args.view_source == "nmap": if db.nmap is None: parser.error('Cannot use "nmap" (no Nmap database exists)') fltnmap = db.nmap.parse_args(args, fltnmap) _from = [nmap_to_view(fltnmap, category=view_category)] elif args.view_source == "passive": if db.passive is None: parser.error('Cannot use "passive" (no Passive database exists)') fltpass = db.passive.parse_args(args, fltpass) _from = [passive_to_view(fltpass, category=view_category)] if args.test: args.processes = 1 outdb = db.view if args.to_db is None else DBView.from_url(args.to_db) # Output results if args.processes > 1: nprocs = max(args.processes - 1, 1) with Pool( nprocs, initializer=worker_initializer, initargs=(args.to_db, args.no_merge), ) as pool: for _ in pool.imap(merge_and_output, to_view_parallel(_from)): pass for _ in pool.imap(worker_destroyer, [None] * nprocs): pass else: if args.test: def output(host: Record) -> None: return displayfunction_json([host], outdb) elif args.no_merge: output = outdb.store_host else: output = outdb.store_or_merge_host try: datadb = outdb.globaldb.data except AttributeError: datadb = None outdb.start_store_hosts() for record in to_view(_from, datadb): output(record) outdb.stop_store_hosts()
[ 57 ]
def METHOD_NAME(self): """Verify G2 instruction construction""" self.assertEqual(str(MNVR("G2X2Y2I1")), "G2{'I': 1.0, 'X': 2.0, 'Y': 2.0}") self.assertEqual(len(MNVR("G2X2Y2I1").instr), 1) self.assertEqual(type(MNVR("G2X2Y2I1").instr[0]), PathLanguage.MoveArcCW)
[ 2650 ]
def METHOD_NAME(self) -> bool: """ Returns: True if channels is closed, False otherwise. """ return self.get() == "close"
[ 137, 4703 ]
def METHOD_NAME(self, path=None): return self.manager.list_prod("list_output", path)
[ 245, 146 ]
def METHOD_NAME(self) -> None: self._queue = deque(self._items)
[ 656 ]
def METHOD_NAME(cls, labels: Dict[str, str]) -> Metadata: """ Get manifest from image labels. Args: labels: key, value string pairs Returns: Metadata Raises: MetadataError """ metadata_dict = translate_plain_to_tree(labels) try: sonic_metadata = metadata_dict['com']['azure']['sonic'] except KeyError: raise MetadataError('No metadata found in image labels') try: manifest_string = sonic_metadata['manifest'] except KeyError: raise MetadataError('No manifest found in image labels') try: manifest_dict = json.loads(manifest_string) except (ValueError, TypeError) as err: raise MetadataError(f'Failed to parse manifest JSON: {err}') components = {} if 'versions' in sonic_metadata: for component, version in sonic_metadata['versions'].items(): try: components[component] = Version.parse(version) except ValueError as err: raise MetadataError(f'Failed to parse component version: {err}') labels_yang_modules = sonic_metadata.get('yang-module') yang_modules = [] if isinstance(labels_yang_modules, str): yang_modules.append(labels_yang_modules) log.debug("Found one YANG module") elif isinstance(labels_yang_modules, dict): yang_modules.extend(labels_yang_modules.values()) log.debug(f"Found YANG modules: {labels_yang_modules.keys()}") else: log.debug("No YANG modules found") return Metadata(Manifest.marshal(manifest_dict), components, yang_modules)
[ 280, 415 ]
def METHOD_NAME(self, num): pages = num // self.per_page self.page_start += pages return pages * self.per_page
[ 2423 ]
def METHOD_NAME(request): # pragma: no cover """This function takes the arguments passed to the request handler and uses them to generate a WSGI compatible environ dictionary. """ class AwaitablePayload(object): def __init__(self, payload): self.payload = payload or b'' async def read(self, length=None): if length is None: r = self.payload self.payload = b'' else: r = self.payload[:length] self.payload = self.payload[length:] return r uri_parts = urlsplit(request.url) environ = { 'wsgi.input': AwaitablePayload(request.body), 'wsgi.errors': sys.stderr, 'wsgi.version': (1, 0), 'wsgi.async': True, 'wsgi.multithread': False, 'wsgi.multiprocess': False, 'wsgi.run_once': False, 'SERVER_SOFTWARE': 'sanic', 'REQUEST_METHOD': request.method, 'QUERY_STRING': uri_parts.query or '', 'RAW_URI': request.url, 'SERVER_PROTOCOL': 'HTTP/' + request.version, 'REMOTE_ADDR': '127.0.0.1', 'REMOTE_PORT': '0', 'SERVER_NAME': 'sanic', 'SERVER_PORT': '0', 'sanic.request': request } for hdr_name, hdr_value in request.headers.items(): hdr_name = hdr_name.upper() if hdr_name == 'CONTENT-TYPE': environ['CONTENT_TYPE'] = hdr_value continue elif hdr_name == 'CONTENT-LENGTH': environ['CONTENT_LENGTH'] = hdr_value continue key = 'HTTP_%s' % hdr_name.replace('-', '_') if key in environ: hdr_value = '%s,%s' % (environ[key], hdr_value) environ[key] = hdr_value environ['wsgi.url_scheme'] = environ.get('HTTP_X_FORWARDED_PROTO', 'http') path_info = uri_parts.path environ['PATH_INFO'] = path_info environ['SCRIPT_NAME'] = '' return environ
[ 711, 377 ]
def METHOD_NAME(): query = {"operations": {"actions": [ "create_message", "add_qnode(name=glucose, key=n0)", "add_qnode(name=diabetes, key=n1)", "connect(action=connect_nodes, max_path_length=3, shortest_path=false)", "resultify()", "filter_results(action=limit_number_of_results, max_results=30)", "return(message=true, store=true)" ]}} [response, message] = _do_arax_query(query) assert response.status == 'OK' assert len(message.query_graph.edges) >= 3 assert len(message.results) > 0
[ 9, 9500 ]
def METHOD_NAME( labeled_data, annotations_output_dir, images_output_dir, label_format="WKT", ): """Convert Labelbox JSON export to Pascal VOC format. Args: labeled_data (str): File path to Labelbox JSON export of label data. annotations_output_dir (str): File path of directory to write Pascal VOC annotation files. images_output_dir (str): File path of directory to write images. label_format (str): Format of the labeled data. Valid options are: "WKT" and "XY", default is "WKT". Todo: * Add functionality to allow use of local copy of an image instead of downloading it each time. """ # make sure annotation output directory is valid try: annotations_output_dir = os.path.abspath(annotations_output_dir) assert os.path.isdir(annotations_output_dir) except AssertionError: logging.exception("Annotation output directory does not exist") return None # read labelbox JSON output with open(labeled_data) as f: lines = f.readlines() label_data = json.loads(lines[0]) for data in label_data: try: write_label( data["ID"], data["Labeled Data"], data["Label"], label_format, images_output_dir, annotations_output_dir, ) except requests.exceptions.MissingSchema: logging.exception( '"Labeled Data" field must be a URL. ' "Support for local files coming soon", ) continue except requests.exceptions.ConnectionError: logging.exception( "Failed to fetch image from {}".format(data["Labeled Data"]), ) continue
[ 280, 763 ]