code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
def total(r,h): <NEW_LINE> <INDENT> return lateral(r,h) + 2 * base(r) | calcula a area total do cilindro dado o raio(r),e a altura(h)
float, float - float | 625941b394891a1f4081b853 |
def delete_position_data(self, i): <NEW_LINE> <INDENT> self.shot_index.pop(i) <NEW_LINE> self.shot_frame.pop(i) <NEW_LINE> self.array_ball_position_shot_x.pop(i) <NEW_LINE> self.array_ball_position_shot_y.pop(i) <NEW_LINE> self.arrayPlayerAPosition_x.pop(i) <NEW_LINE> self.arrayPlayerAPosition_y.pop(i) <NEW_LINE> self.... | delete position data which is selected tree data
Parameters
----------
i:selected num | 625941b315baa723493c3d1d |
def recurseValidate(doc, doc_class, key, val, attrPath, doc_errors): <NEW_LINE> <INDENT> doc = doc_class(**val) <NEW_LINE> errors = doc.validate() <NEW_LINE> if errors: <NEW_LINE> <INDENT> error = {'attrPath': '.'.join(attrPath), 'fld':key, '_cls': val['_cls'], 'errors': errors} <NEW_LINE> doc_errors.append(error) | this will be called by recursiveDoc function and be executed on each doc/embedded doc | 625941b3e1aae11d1e749a5f |
def gtri(r, a, b): <NEW_LINE> <INDENT> if r[a] > b: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0 | gtri op | 625941b3ad47b63b2c509d38 |
def validate_credentials(data): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> person = data['person'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise ValidationError("The key 'person' is not present. Please validate as a coursys user.") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> secret = data['secret'] ... | Determine if the data contains a valid user, secret, and unit.
If the data doesn't validate, it will throw a "ValidationError". | 625941b326068e7796caea83 |
def toString(s): <NEW_LINE> <INDENT> if isinstance(s, str): <NEW_LINE> <INDENT> return s <NEW_LINE> <DEDENT> if isinstance(s, bytes): <NEW_LINE> <INDENT> if sys.version_info[0] == 2: <NEW_LINE> <INDENT> return str(s) <NEW_LINE> <DEDENT> return s.decode('ascii') <NEW_LINE> <DEDENT> if isinstance(s, list): <NEW_LINE> <IN... | This takes care of python2/3 differences | 625941b3097d151d1a222c0f |
def characters(self, ch): <NEW_LINE> <INDENT> self.characterElementIdx += 1 <NEW_LINE> if self.inIgnorableElement == 0: <NEW_LINE> <INDENT> if self.characterElementIdx not in self.contentBitSet: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.html += xmlEncode(str(ch)) | generated source for method characters | 625941b3435de62698dfda00 |
def getPosition(poiID): <NEW_LINE> <INDENT> return _getUniversal(tc.VAR_POSITION, poiID) | getPosition(string) -> (double, double)
Returns the position coordinates of the given poi. | 625941b3d8ef3951e32432e9 |
def update(self, lang=None): <NEW_LINE> <INDENT> r <NEW_LINE> lang = _get_translate_language(lang) <NEW_LINE> if lang != _get_default_language(): <NEW_LINE> <INDENT> query = models.Q() <NEW_LINE> _translations = [] <NEW_LINE> for address, text in self._get_changed_fields(): <NEW_LINE> <INDENT> query |= models.Q(**addre... | Update the translations of the `Context`\ 's `purview` in a language. | 625941b36e29344779a623c3 |
def __getitem__(self, name): <NEW_LINE> <INDENT> return self._byname[name] | Retrieve entry by relative path.
:return: tuple with (ctime, mtime, dev, ino, mode, uid, gid, size, sha,
flags) | 625941b3cc40096d61595702 |
def deleteExtraneous(component, ignore_dtstamp=False): <NEW_LINE> <INDENT> for comp in component.components(): <NEW_LINE> <INDENT> deleteExtraneous(comp, ignore_dtstamp) <NEW_LINE> <DEDENT> for line in component.lines(): <NEW_LINE> <INDENT> if line.params.has_key('X-VOBJ-ORIGINAL-TZID'): <NEW_LINE> <INDENT> del line.pa... | Recursively walk the component's children, deleting extraneous details like
X-VOBJ-ORIGINAL-TZID. | 625941b34e696a04525c9201 |
def _filterPitchLabel(self, ticks): <NEW_LINE> <INDENT> post = [] <NEW_LINE> for value, label in ticks: <NEW_LINE> <INDENT> label = _substituteAccidentalSymbols(label) <NEW_LINE> post.append([value, label]) <NEW_LINE> <DEDENT> return post | Given a list of ticks, replace all labels with alternative/unicode symbols where necessary.
| 625941b3e5267d203edcda4d |
def __init__(self): <NEW_LINE> <INDENT> self.val = 0 <NEW_LINE> self.running = True | Initialize the counter to 0, the running-flag to True. | 625941b35510c4643540f1a6 |
def input_fn(data_file, num_epochs, shuffle, batch_size, feature_only=False): <NEW_LINE> <INDENT> assert tf.gfile.Exists(data_file), ( '%s not found. Please make sure you have run data_download.py and ' 'set the --data_dir argument to the correct path.' % data_file) <NEW_LINE> def parse_csv(value): <NEW_LINE> <INDENT> ... | Generate an input function for the Estimator. | 625941b31d351010ab8558d1 |
def mask_correlated_samples(self, batch_size): <NEW_LINE> <INDENT> N = 2 * batch_size <NEW_LINE> mask = torch.ones((N, N), dtype=bool) <NEW_LINE> mask = mask.fill_diagonal_(0) <NEW_LINE> for i in range(batch_size): <NEW_LINE> <INDENT> mask[i, batch_size + i] = 0 <NEW_LINE> mask[batch_size + i, i] = 0 <NEW_LINE> <DEDENT... | Mask correlated samples.
:param batch_size: batch size of the dataset
:type batch_size: int | 625941b34d74a7450ccd3f6f |
def __init__(self, client_manager): <NEW_LINE> <INDENT> self.client_manager = client_manager <NEW_LINE> self.cinder_client = self.client_manager.get_cinder() | :param client_manager:
:return: | 625941b30a50d4780f666c3a |
def __init__(self, wf, project_name): <NEW_LINE> <INDENT> self.project_name = project_name <NEW_LINE> self.wf = wf <NEW_LINE> self.live_seed_list = None <NEW_LINE> self.test_seed_lists = [] <NEW_LINE> self.subject = None <NEW_LINE> self.html = None <NEW_LINE> self.category = None <NEW_LINE> self.from_line = None <NEW_L... | @param wf: Workfront service object
@param project_name: that the created will have. | 625941b3be8e80087fb209fb |
def next_staircase(self, home_floor, dest_floor): <NEW_LINE> <INDENT> if home_floor is None: <NEW_LINE> <INDENT> debug.log('Cannot determine next staircase when home floor is None') <NEW_LINE> return None <NEW_LINE> <DEDENT> if dest_floor is None: <NEW_LINE> <INDENT> debug.log('Cannot determine next staircase when dest... | Return the next staircase that any character on one floor (the home
floor) should ascend or descend next to reach another floor (the
destination floor).
:param home_floor: The home floor.
:param dest_floor: The destination floor. | 625941b3377c676e91271f5e |
def surf_dist(pred_seg, gt_seg, sampling=1, connectivity=1): <NEW_LINE> <INDENT> pred_seg = np.atleast_1d(pred_seg.astype(np.bool)) <NEW_LINE> gt_seg = np.atleast_1d(gt_seg.astype(np.bool)) <NEW_LINE> conn = morphology.generate_binary_structure(pred_seg.ndim, connectivity) <NEW_LINE> S = pred_seg ^ morphology.binary_er... | from https://mlnotebook.github.io/post/surface-distance-function/
Calculates and returns the surface distance between the Ground Truth segmentation and the predicted one.
The surface distance is a vector with length as len(contour(pred_seg)) that indicates for every pixel on the contour,
its distance from the closest p... | 625941b350812a4eaa59c0d2 |
def turn_off(self, **kwargs): <NEW_LINE> <INDENT> for _ in range(self.signal_repetitions): <NEW_LINE> <INDENT> self.tellstick_device.turn_off() <NEW_LINE> <DEDENT> self._brightness = 0 <NEW_LINE> self.update_ha_state() | Turn the switch off. | 625941b307f4c71912b11232 |
def create_prime_iterator(rfrom, rto): <NEW_LINE> <INDENT> prefix = [2] if rfrom < 3 and rto > 1 else [] <NEW_LINE> odd_rfrom = 3 if rfrom < 3 else make_odd(rfrom) <NEW_LINE> odd_numbers = (num for num in xrange(odd_rfrom, rto + 1, 2)) <NEW_LINE> prime_generator = (num for num in odd_numbers if not has_odd_divisor(num)... | Create iterator of prime numbers in range [rfrom, rto] | 625941b34f88993c3716be21 |
def find_device_ids(self): <NEW_LINE> <INDENT> device_info = str(sh.xinput('list', '--short')) <NEW_LINE> id_pattern = r'id=(\d+)' <NEW_LINE> xtest_id_pattern = r'XTEST[^\n]+id=(\d+)' <NEW_LINE> device_ids = list(set(re.findall(id_pattern, device_info)).difference( set(re.findall(xtest_id_pattern, device_info)))) <NEW_... | :return: list of all device ids from xinput, excluding XTEST devices. | 625941b326068e7796caea84 |
def op_tolower(string): <NEW_LINE> <INDENT> return string.lower() | Lowercases string | 625941b3e64d504609d745ec |
def cluster(x, max_num_clusters=3): <NEW_LINE> <INDENT> data = _transform_data(x) <NEW_LINE> centroids = _apply_clustering(data, max_num_clusters) <NEW_LINE> centroids = np.append(centroids, 0) <NEW_LINE> centroids = np.round(centroids).astype(np.int32) <NEW_LINE> centroids = np.unique(centroids) <NEW_LINE> return cent... | Applies clustering on reduced data,
i.e. data where power is greater than threshold.
Parameters
----------
X : pd.Series or single-column pd.DataFrame
max_num_clusters : int
Returns
-------
centroids : ndarray of int32s
Power in different states of an appliance, sorted | 625941b397e22403b379cd45 |
def __init__(self, session): <NEW_LINE> <INDENT> self.session = session <NEW_LINE> tkutil.Dialog.__init__(self, session.tk, 'Open & Reload Multiple Spectra') <NEW_LINE> proj = session.project <NEW_LINE> mfs = tkutil.multiple_file_selection(self.top, proj.sparky_directory) <NEW_LINE> mfs.frame.pack(side = 'top', anchor ... | Initialization
| 625941b376d4e153a657e8dc |
def load(self, request): <NEW_LINE> <INDENT> assignments = request.FILES <NEW_LINE> reader = csv.reader(assignments['csv']) <NEW_LINE> def get_model(model, name, cache): <NEW_LINE> <INDENT> name = name.strip() <NEW_LINE> if not name in cache: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> cache[name] = model.objects.get(... | Loads new Assignments. | 625941b330bbd722463cbb6f |
def find_or_create(stimulus, listnum, m, sd, ratings, responses, subjects): <NEW_LINE> <INDENT> obj = None <NEW_LINE> sMsg = "" <NEW_LINE> oErr = ErrHandle() <NEW_LINE> try: <NEW_LINE> <INDENT> obj = Brysbaert.objects.filter(stimulus=stimulus).first() <NEW_LINE> if obj == None: <NEW_LINE> <INDENT> obj = Brysbaert(stimu... | Find existing or create new item | 625941b3f548e778e58cd327 |
def json_gen( tokens, f , first=None): <NEW_LINE> <INDENT> def get_block(tokens, start, end): <NEW_LINE> <INDENT> level = 1 <NEW_LINE> out = start <NEW_LINE> while level > 0: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> t = tokens.next() <NEW_LINE> out += t[1] <NEW_LINE> if t[1] == start: <NEW_LINE> <INDENT> level +=1... | Generator of json objects | 625941b32eb69b55b151c657 |
def __call__(self,request,*args,**kwargs): <NEW_LINE> <INDENT> self._template = None <NEW_LINE> self.response = HttpResponse() <NEW_LINE> self.request = request <NEW_LINE> self.report = Report() <NEW_LINE> self.cookies = CookieHandler(self) <NEW_LINE> self.init(*args,**kwargs) <NEW_LINE> getattr(self,'post' if self.req... | Works when controller executes mapped subclass | 625941b30a50d4780f666c3b |
def shortest_path(source, target): <NEW_LINE> <INDENT> explored = set() <NEW_LINE> start = Node(state = source, parent = None, action = None) <NEW_LINE> frontier = QueueFrontier() <NEW_LINE> frontier.add(start) <NEW_LINE> while True: <NEW_LINE> <INDENT> if frontier.empty(): <NEW_LINE> <INDENT> raise Exception("No solut... | Returns the shortest list of (movie_id, person_id) pairs
that connect the source to the target.
If no possible path, returns None. | 625941b30383005118ecf391 |
def update(self, host, values): <NEW_LINE> <INDENT> return self._update("/os-hosts/%s" % host, values) | Update status or maintenance mode for the host. | 625941b3b7558d58953c4cca |
def classify(self, x): <NEW_LINE> <INDENT> y = np.sign(x.dot(self.w.T)) <NEW_LINE> return y | classify a dataset using the internal PLA weights
x - data set
returns the classifification | 625941b3507cdc57c6306a7e |
def E_MeV_at_reference_depth_cm(energy_MeV): <NEW_LINE> <INDENT> E_MeV_at_2cm = { 70: 48, 150: 138, 226: 215 } <NEW_LINE> return E_MeV_at_2cm[energy_MeV] | Geant4-calculated depth at 2 cm water depth | 625941b363d6d428bbe442a3 |
def _create_database_artifacts(self): <NEW_LINE> <INDENT> pass | Dummy placeholder. Nothing to do. | 625941b38e7ae83300e4ad7c |
def __init__(self, longueur, largeur): <NEW_LINE> <INDENT> self.longueur = longueur <NEW_LINE> self.largeur = largeur <NEW_LINE> self.liste_objets = self.generer_liste_objets() <NEW_LINE> types = self.generer_liste_types() <NEW_LINE> self.map = self._generer_map() | constructeur | 625941b3d6c5a10208143df3 |
def prompt(object): <NEW_LINE> <INDENT> return object.get_name() + ":" + object.get_responder_name() + "> " | ピティナのプロンプトを作る関数
戻り値 'Ptnaオブジェクト名:応答オブジェクト名 > ' | 625941b3f8510a7c17cf94b2 |
def eth_getFilterChanges(self, filter_id): <NEW_LINE> <INDENT> return self.web3.eth.getFilterChanges(filter_id) | https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getfilterchanges
http://web3py.readthedocs.io/en/latest/web3.eth.html#web3.eth.Eth.getFilterChanges | 625941b37b180e01f3dc45b5 |
def HR(p_c, T = 293.15): <NEW_LINE> <INDENT> return exp( p_c / (rho_liq*Rv*T) ) | Clausius-Clapeyron formula: returns the value of relative humidity as a
function of the capillary pressure and temperature
input : capillary pressure p_c [Pa], temperature T [K] | 625941b31b99ca400220a85d |
def images(self, **kwargs): <NEW_LINE> <INDENT> path = self._get_id_path('images') <NEW_LINE> response = self._GET(path, kwargs) <NEW_LINE> self._set_attrs_to_values(response) <NEW_LINE> return response | Get the images for a specific person id.
Returns:
A dict respresentation of the JSON returned from the API. | 625941b33617ad0b5ed67cab |
def numeric(self, source, target, numeric, args, message): <NEW_LINE> <INDENT> if numeric == 1: <NEW_LINE> <INDENT> for ircchannel in self.ircchannels: <NEW_LINE> <INDENT> self.fire(JOIN(ircchannel)) <NEW_LINE> <DEDENT> <DEDENT> elif numeric == 433: <NEW_LINE> <INDENT> self.nick = newnick = "%s_" % self.nick <NEW_LINE>... | numeric Event
This event is triggered by the ``IRC`` Protocol Component when we have
received an IRC Numberic Event from server we are connected to. | 625941b376e4537e8c351423 |
def freq(self, freq : int = None)-> None: <NEW_LINE> <INDENT> ... | 用于获取或者设置 PWM 对象的频率,频率的范围为 [1, 156250]。如果参数为空,返回当前 PWM 对象的频率;如果参数非空,则使用该参数设置当前 PWM 对象的频率。 | 625941b3925a0f43d2549c1f |
def scale_vecs(vecdict): <NEW_LINE> <INDENT> return [scalar_mul(value, 1/key) for (key,value) in vecdict.items()] | >>> v1 = Vec({1,2,3}, {2: 9})
>>> v2 = Vec({1,2,4}, {1: 1, 2: 2, 4: 8})
>>> scale_vecs({3: v1, 5: v2}) == [Vec({1,2,3},{2: 3.0}), Vec({1,2,4},{1: 0.2, 2: 0.4, 4: 1.6})]
True | 625941b31f037a2d8b945fac |
def get_consumers(self, Consumer, channel): <NEW_LINE> <INDENT> return [ Consumer( queues=self.queue, callbacks=[self.on_message], accept=[self.message_default_format], prefetch_count=REANA_JOB_STATUS_CONSUMER_PREFETCH_COUNT if REANA_JOB_STATUS_CONSUMER_PREFETCH_COUNT else None, ) ] | Implement providing kombu.Consumers with queues/callbacks. | 625941b30c0af96317bb7f96 |
def isNotInStr(search , content): <NEW_LINE> <INDENT> if isNotNone(search) and isStr(content): <NEW_LINE> <INDENT> if (len(content) >= len(search)): <NEW_LINE> <INDENT> return (search not in content) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print(... | This function a string dos not contain a subsequence.
A subsequence could be a char, string, or a value!
:param search: string search element
:param content: string content element | 625941b356ac1b37e6263f8f |
def test_users_enrolled_in_active_only(self): <NEW_LINE> <INDENT> CourseEnrollmentFactory.create(user=self.user, course_id=self.course.id, is_active=True) <NEW_LINE> CourseEnrollmentFactory.create(user=self.user_2, course_id=self.course.id, is_active=False) <NEW_LINE> active_enrolled_users = list(CourseEnrollment.objec... | CourseEnrollment.users_enrolled_in should return only Users with active enrollments when
`include_inactive` has its default value (False). | 625941b301c39578d7e74bf1 |
def _sleep_timer(self): <NEW_LINE> <INDENT> if self.EngineModesManager.get_mic_mode() == "on": <NEW_LINE> <INDENT> self.EngineModesManager.set_mic_mode("sleeping") | Puts microphone to sleep if "on" via sleep_timer callback every x seconds | 625941b399fddb7c1c9de148 |
def generate_token(): <NEW_LINE> <INDENT> credentials = oauth2.SpotifyClientCredentials( client_id='37373b59fc3442f88b4880d7bcaff6ea', client_secret='2edca93ea10e41c8b7601d693acad192') <NEW_LINE> token = credentials.get_access_token() <NEW_LINE> return token | Generate the token. Please respect these credentials :) | 625941b3bf627c535bc12f84 |
def ask_if_true(self, query): <NEW_LINE> <INDENT> for _ in self.ask_generator(query): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False | Return True if the KB entails query, else return False. | 625941b3cad5886f8bd26d8f |
def set_etc_hosts_address(hostname, ip): <NEW_LINE> <INDENT> def read_file(path): <NEW_LINE> <INDENT> with open(path) as file: <NEW_LINE> <INDENT> contents = file.read() <NEW_LINE> <DEDENT> return contents <NEW_LINE> <DEDENT> def write_etc_hosts(text): <NEW_LINE> <INDENT> assert text.strip() <NEW_LINE> with open('/etc/... | Set host address in /etc/hosts from device address. | 625941b307d97122c4178637 |
def _csr_begin(self): <NEW_LINE> <INDENT> assert 0 <= self._slot <= 4, "Provided slot must be between 0 and 4." <NEW_LINE> self._key = bytearray(64) <NEW_LINE> if self.private_key: <NEW_LINE> <INDENT> self._atecc.gen_key(self._key, self._slot, self.private_key) <NEW_LINE> return <NEW_LINE> <DEDENT> self._atecc.gen_key(... | Initializes CSR generation. | 625941b310dbd63aa1bd295e |
def ComputeCG(self,var): <NEW_LINE> <INDENT> Utw = self.basis.T * self.atoms.masses() <NEW_LINE> return 2.0 / self.box * np.dot(Utw,var) | Computes CG momenta or positions
CG = U^t * Mass * var
var could be atomic positions or velocities | 625941b3d10714528d5ffa93 |
def test_office_with_invalid_Keys(self): <NEW_LINE> <INDENT> response = self.app_test_client.post('/api/v1/office/add',json=self.invalid_keys) <NEW_LINE> self.assertEqual(response.status_code,400) | test adding office with an invalid key | 625941b3925a0f43d2549c20 |
def find_translation(self, translated_polyhedron): <NEW_LINE> <INDENT> no_translation_exception = ValueError('polyhedron is not a translation of self') <NEW_LINE> if ( set(self.rays()) != set(translated_polyhedron.rays()) or set(self.lines()) != set(translated_polyhedron.lines()) or self.n_vertices() != translated_poly... | Return the translation vector to ``translated_polyhedron``.
INPUT:
- ``translated_polyhedron`` -- a polyhedron.
OUTPUT:
A `\ZZ`-vector that translates ``self`` to
``translated_polyhedron``. A ``ValueError`` is raised if
``translated_polyhedron`` is not a translation of ``self``,
this can be used to check that two p... | 625941b3236d856c2ad4458d |
def shape(self, original_shape): <NEW_LINE> <INDENT> shape = original_shape <NEW_LINE> for processor in self._stack: <NEW_LINE> <INDENT> shape = processor.shape(shape) <NEW_LINE> <DEDENT> return shape | Return output shape of state
Args:
original_shape: tuple containing original state
Returns: tuple containing processed state shape | 625941b315baa723493c3d1f |
def canonicalize_bond(arr): <NEW_LINE> <INDENT> container_type = type(arr) <NEW_LINE> if len(arr) == 0: <NEW_LINE> <INDENT> raise ValueError("zero sized array") <NEW_LINE> <DEDENT> elif len(arr) == 1: <NEW_LINE> <INDENT> return arr <NEW_LINE> <DEDENT> elif arr[0] > arr[-1]: <NEW_LINE> <INDENT> return container_type(rev... | Canonicalize a bonded interaction. If arr[0] < arr[-1] then arr is
returned, else if arr[0] > arr[-1], then arr[::-1] is returned. If
arr[0] == arr[-1] then an exception is thrown.
Parameters
----------
arr: list of int
Bond indices.
Returns
-------
arr: list of int
Canonicalized bond indices. | 625941b3ad47b63b2c509d3a |
def add_parent(self, id, tob, mate_id=-1, mate_tob=-1): <NEW_LINE> <INDENT> if mate_id == -1: <NEW_LINE> <INDENT> self.action_history.append(np.array([id, tob])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.action_history.append(np.array([[id, tob], [mate_id, mate_tob]])) | Add parent information to logs | 625941b33539df3088e2e0f9 |
def protocol_0501(abf): <NEW_LINE> <INDENT> assert isinstance(abf, pyabf.ABF) <NEW_LINE> timeSec1, timeSec2 = 1.10, 1.30 <NEW_LINE> p1, p2 = int(timeSec1*abf.dataRate), int(timeSec2*abf.dataRate) <NEW_LINE> plotFigNew(abf) <NEW_LINE> shadeDigitalOutput(abf, 4) <NEW_LINE> for sweep in abf.sweepList: <NEW_LINE> <INDENT> ... | 0501 opto -50.pro | 625941b3a79ad161976cbef3 |
def detail_with_a_future_question(self): <NEW_LINE> <INDENT> future_poll = create_poll(poll_text="Future poll", days=5) <NEW_LINE> response = self.client.get( reverse('polls:detail'), args=(future_poll.id,)) <NEW_LINE> self.assertEqual(response.status_code, 404) | The detail view of a poll with `publication_date` in the future should
return 404. | 625941b3d7e4931a7ee9dcc9 |
def upload_config(config, config_folder_names=[], config_files={}): <NEW_LINE> <INDENT> names = config_folder_names + config_files.keys() <NEW_LINE> ctx = dict(map(lambda name: (name, '%s/%s' % (config.path, name)), names)) <NEW_LINE> tmpfolder = mkdtemp() <NEW_LINE> listify = lambda what: what if isinstance(what, list... | Common code to upload puppet and chef config files
to remote server.
Heavily based on upload procedure from fabric-provision:
https://github.com/caffeinehit/fabric-provision/blob/master/provision/__init__.py | 625941b36e29344779a623c4 |
def render_diff(a, b, n=3): <NEW_LINE> <INDENT> actions = [] <NEW_LINE> chunks = [] <NEW_LINE> for group in SequenceMatcher(None, a, b).get_grouped_opcodes(n): <NEW_LINE> <INDENT> old_line, old_end, new_line, new_end = group[0][1], group[-1][2], group[0][3], group[-1][4] <NEW_LINE> lines = [] <NEW_LINE> def add_line(ol... | Parse the diff an return data for the template. | 625941b34e696a04525c9203 |
def _ComputeDiskStatusInner(self, instance, snode_uuid, node_uuid2name_fn, dev): <NEW_LINE> <INDENT> drbd_info = None <NEW_LINE> if dev.dev_type in constants.DTS_DRBD: <NEW_LINE> <INDENT> if dev.logical_id[0] == instance.primary_node: <NEW_LINE> <INDENT> snode_uuid = dev.logical_id[1] <NEW_LINE> <DEDENT> else: <NEW_LIN... | Compute block device status.
@attention: The device has to be annotated already. | 625941b36fece00bbac2d4e8 |
def updateRange(service, SpreadsheetId, SheetName, req_range, sheetData): <NEW_LINE> <INDENT> requestBody = {'values': sheetData} <NEW_LINE> a1Note = "'{0}'!{1}".format(SheetName, req_range) <NEW_LINE> spreadServ = service.spreadsheets().values() <NEW_LINE> returnedRange = spreadServ.update( spreadsheetId=SpreadsheetId... | Update | 625941b326238365f5f0ec16 |
def __str__(self): <NEW_LINE> <INDENT> return self.text[:50] + "..." | Devolve uma representação em string do modelo | 625941b330bbd722463cbb70 |
def clean_logs(self): <NEW_LINE> <INDENT> logger.info("Cleaning logs") <NEW_LINE> restart = False <NEW_LINE> if self.running: <NEW_LINE> <INDENT> logger.warn("The cluster needs to be stopped before cleaning.") <NEW_LINE> self.stop() <NEW_LINE> restart = True <NEW_LINE> <DEDENT> action = Remote("rm -f " + self.logs_file... | Remove all MongoDB logs. | 625941b34527f215b584c212 |
def test_get_statistics_top_country_empty(self): <NEW_LINE> <INDENT> top_country_name_empty = InstallationStatistics.get_statistics_top_country([]) <NEW_LINE> self.assertEqual(top_country_name_empty, '') | Test get_statistics_top_country method of the Installation statistics with an empty parameter. | 625941b3377c676e91271f60 |
def getLayoutNames(self): <NEW_LINE> <INDENT> if self._db == '': <NEW_LINE> <INDENT> raise FMError('No database was selected') <NEW_LINE> <DEDENT> request = [] <NEW_LINE> request.append(uu({'-db': self._db})) <NEW_LINE> request.append(uu({'-layoutnames': ''})) <NEW_LINE> result = self._doRequest(request) <NEW_LINE> res... | This function returns the list of layouts for the current db. | 625941b3b545ff76a8913bce |
def twoSum(self, nums, target): <NEW_LINE> <INDENT> sortNum = sorted( nums ) <NEW_LINE> i, j = 0, len(nums) - 1 <NEW_LINE> while i < j: <NEW_LINE> <INDENT> temp = sortNum[i] + sortNum[j] <NEW_LINE> if temp < target: <NEW_LINE> <INDENT> i += 1 <NEW_LINE> <DEDENT> elif temp > target: <NEW_LINE> <INDENT> j -= 1 <NEW_LINE>... | :type nums: List[int]
:type target: int
:rtype: List[int] | 625941b350485f2cf553cb46 |
def _validate_metric_name(name): <NEW_LINE> <INDENT> if not _VALID_PARAM_AND_METRIC_NAMES.match(name): <NEW_LINE> <INDENT> raise Exception("Invalid metric name: '%s'. %s" % (name, _BAD_CHARACTERS_MESSAGE)) <NEW_LINE> <DEDENT> if _path_not_unique(name): <NEW_LINE> <INDENT> raise Exception("Invalid metric name: '%s'. %s"... | Check that `name` is a valid metric name and raise an exception if it isn't. | 625941b38da39b475bd64d24 |
def find(mal, regex, filtering='all', extra=False, user=None): <NEW_LINE> <INDENT> items = mal.find(regex, extra=extra, user=user) <NEW_LINE> if len(items) == 0: <NEW_LINE> <INDENT> print(color.colorize("No matches in list ᕙ(⇀‸↼‶)ᕗ", 'red')) <NEW_LINE> return <NEW_LINE> <DEDENT> if filtering != 'all': <NEW_LINE> <INDEN... | Find all anime in a certain status given a regex. | 625941b3baa26c4b54cb0ed2 |
def findshift(imagenum1, imagenum2, paramlog): <NEW_LINE> <INDENT> match=paramlog[paramlog['Filenumber']==imagenum1] <NEW_LINE> if len(match)==1: <NEW_LINE> <INDENT> fname1=match.iloc[0]['Filename'].replace('.sem','.jpg') <NEW_LINE> image1=Image.open(fname1) <NEW_LINE> imshiftx1=match.iloc[0]['ImageshiftX'] <NEW_LINE> ... | Pass pre and post- images, determine stage drift in microns and any uncorrected pixel shift and print it
error is returned from register_translation | 625941b326238365f5f0ec17 |
def pformat_atom_detail(atom_detail, indent=0): <NEW_LINE> <INDENT> detail_type = logbook.atom_detail_type(atom_detail) <NEW_LINE> lines = ["%s%s: '%s'" % (" " * (indent), detail_type, atom_detail.name)] <NEW_LINE> lines.extend(_format_shared(atom_detail, indent=indent + 1)) <NEW_LINE> lines.append("%s- version = %s" %... | Pretty formats a atom detail. | 625941b367a9b606de4a7c6b |
def runCD(cd): <NEW_LINE> <INDENT> logger.debug("runCD: %s", cd) <NEW_LINE> def preexec(): <NEW_LINE> <INDENT> resource.setrlimit(resource.RLIMIT_CPU, (LIMIT_SEC, LIMIT_SEC)) <NEW_LINE> <DEDENT> if cd.state != 'Q': <NEW_LINE> <INDENT> raise RuntimeError("This CD is not in state queued! (%s)"%cd) <NEW_LINE> <DEDENT> cd.... | Run a single CD method and return.
Use `run()` as the main entry point. | 625941b338b623060ff0aba4 |
def _empty_adaptor(self): <NEW_LINE> <INDENT> LOG.debug("The adaptor is empty") <NEW_LINE> map(self.inhibit, self.outgoingTransitions(self.getInitial())) | The adaptor inhibits everything and thus it is empty
This method is called when it inhibits the initial state | 625941b3d164cc6175782afb |
def test_browserlayer_removed(self): <NEW_LINE> <INDENT> from edi.itunesquizz.interfaces import IEdiItunesquizzLayer <NEW_LINE> from plone.browserlayer import utils <NEW_LINE> self.assertNotIn( IEdiItunesquizzLayer, utils.registered_layers()) | Test that IEdiItunesquizzLayer is removed. | 625941b36fece00bbac2d4e9 |
def get(self,request): <NEW_LINE> <INDENT> print("没有使用优化ORM-起始时间:{}".format(datetime.datetime.now())) <NEW_LINE> obj_list = models.UserProfile.objects.filter(id__lte=5) <NEW_LINE> print("数据量:{}".format(len(obj_list))) <NEW_LINE> for foo in obj_list: <NEW_LINE> <INDENT> temp = foo.name <NEW_LINE> <DEDENT> print("没有使用优化O... | 小数据查询,验证sql语句
:param request:
:return: | 625941b3507cdc57c6306a80 |
def exportLabelledArrayWithHeader( self, arr, names, header, fname, sep=',', format='%f' ): <NEW_LINE> <INDENT> if names != None: <NEW_LINE> <INDENT> assert arr.shape[0] == len( names ), '\n ... rows must equal number of names!' <NEW_LINE> <DEDENT> if header != None: <NEW_LINE> <INDENT> assert arr.shape[1] == len( hea... | Export an array with row names and header
- *arr* the an array like object
- *names* the list of row names
- *header* the list of column names
- *fname* the output filename
- *sep* [default=','] the column separator
- *format* [default='%s'] the output number format
- *appendlist* [default=False] if True append... | 625941b37047854f462a11bc |
def get_ResPhase(self, **kwargs): <NEW_LINE> <INDENT> rp = ResPhase(self._Z, **kwargs) <NEW_LINE> return rp | returns a ResPhase object from z_object | 625941b363d6d428bbe442a5 |
def test_suite(): <NEW_LINE> <INDENT> print("tests for turn clockwise") <NEW_LINE> test(turn_clockwise("N") == "E") <NEW_LINE> test(turn_clockwise("W") == "N") <NEW_LINE> test(turn_clockwise(42) == None) <NEW_LINE> test(turn_clockwise(" ") == None) <NEW_LINE> print("\nday to name") <NEW_LINE> test(day_name(3) == "Wed... | Run the suite of tests for code in this module (this file).
| 625941b3293b9510aa2c3048 |
def set_grado_controller(self, grado_controller): <NEW_LINE> <INDENT> self.__gradoController = grado_controller | Actualiza el controlador de la entidad grado.
:param grado_controller: Controlador de grado (GradoController) | 625941b366673b3332b91e45 |
def jsSelectOne(self, q, cbId): <NEW_LINE> <INDENT> return 'onSelectTemplateObject(%s,%s,%s)' % (q(cbId), q(self.formName), q(self.insert)) | Generates the Javascript code to execute when a single object is
selected in the popup. | 625941b3d6c5a10208143df4 |
def append_nc(nc_file, var_name, dtype='f4', chunksizes=(1, 128, 128), fill_value=-9999, metadata={}, logging=logging): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> nc_obj = nc.Dataset(nc_file, 'a') <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> logging.error('Cannot write to {:s}'.format(nc_file)) <NEW_LINE> ... | Write a new (empty) variable to target NetCDF file.
input:
::
nc_file: NetCDF object, referring to target file
var_name: String, variable name of source NetCDF
metadata: dictionary of attributes, belonging to the variable. | 625941b391af0d3eaac9b7c1 |
def plot_hist(ph): <NEW_LINE> <INDENT> fl2 = "/home/amaity/Dropbox/Datasets-Analyses/ptss-poc/lace_find_normal/dfs-8000-roi-instrumentation-d8w5" <NEW_LINE> phx = pd.read_csv(fl2+"/dataset_ph"+str(ph)+".csv") <NEW_LINE> t1 = phx['time'].values <NEW_LINE> t2 = t1*1000.0 <NEW_LINE> ulim = np.max(t2) <NEW_LINE> llim = n... | Plot the Histograms
and Means and Variances
of lace benchmarks
Input the phase number | 625941b37b180e01f3dc45b7 |
def owners(self, org): <NEW_LINE> <INDENT> return self._parsed("organizations/" + org + "/owners") | List the owners of an organization. | 625941b38e05c05ec3eea11f |
def targz_extract_programm(dProgramm=None, sPackageDir=INSTALL_PACKAGES_DIR_DEFAULT): <NEW_LINE> <INDENT> if dProgramm is None: <NEW_LINE> <INDENT> log.warning(u'Targz. Не определен пакет для разархивирования') <NEW_LINE> return False <NEW_LINE> <DEDENT> remove_programm(dProgramm) <NEW_LINE> install_dir = None <NEW_LIN... | Распаковать tar архив. | 625941b32c8b7c6e89b3557a |
def test_visible(self): <NEW_LINE> <INDENT> self.assertTrue(self.ui.get_visible()) | The widget is visible. | 625941b31f037a2d8b945fae |
def origin_crop_to_target_shape(image, target_shape, origin): <NEW_LINE> <INDENT> native_shape = extract_height_width(image.shape) <NEW_LINE> target_shape = extract_height_width(target_shape) <NEW_LINE> if not crop_in_bounds(native_shape, target_shape, origin): <NEW_LINE> <INDENT> return ((0, 0) + native_shape) <NEW_LI... | Best effort to crop an image to a target shape from fixed origin
Arguments:
image An image. Either single channel (grayscale) or multi-channel (color)
target_shape Target shape of the image section to crop (height, width)
origin ... | 625941b30c0af96317bb7f98 |
def save(self, filename=None, force_unicode=False, quiet=False): <NEW_LINE> <INDENT> if filename: <NEW_LINE> <INDENT> if isinstance(filename, list): <NEW_LINE> <INDENT> file_to_use = os.path.join(*filename) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> file_to_use = filename <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE... | save out as a csv or xls | 625941b391af0d3eaac9b7c2 |
def _request(self, buf, properties, **kwargs): <NEW_LINE> <INDENT> self.ensure_alive() <NEW_LINE> try: <NEW_LINE> <INDENT> input_format = properties.get("inputFormat", "text") <NEW_LINE> if input_format == "text": <NEW_LINE> <INDENT> ctype = "text/plain; charset=utf-8" <NEW_LINE> <DEDENT> elif input_format == "serializ... | Send a request to the CoreNLP server.
:param (str | bytes) buf: data to be sent with the request
:param (dict) properties: properties that the server expects
:return: request result | 625941b35fc7496912cc3735 |
def as_dict(self, include_private=False): <NEW_LINE> <INDENT> keys = [] <NEW_LINE> transactions = [] <NEW_LINE> for netw in self.networks(): <NEW_LINE> <INDENT> for key in self.keys(network=netw.name, include_private=include_private, as_dict=True): <NEW_LINE> <INDENT> keys.append(key) <NEW_LINE> <DEDENT> if self.multis... | Return wallet information in dictionary format
:param include_private: Include private key information in dictionary
:type include_private: bool
:return dict: | 625941b3fb3f5b602dac3446 |
def run_button(self): <NEW_LINE> <INDENT> self.STOP = False <NEW_LINE> self.PRACTICE_TOOL = self.practice_tool_var.get() <NEW_LINE> self.update_status_bar("Running ...") <NEW_LINE> try: <NEW_LINE> <INDENT> if self.init_thread.init.ROLES_ASSIGNED_FLAG: <NEW_LINE> <INDENT> tracker_thread = thread_generator.TrackerThread(... | Starts with Initialization and then runs Jungle Tracking Loop - Resets Stop | 625941b3e1aae11d1e749a63 |
def __getitem__(self, item):
    """Return one batch of data for batch index ``item``.

    Selects this batch's slice of ``self.indexes``, gathers the matching
    ids from ``self.ids_list`` (fancy indexing — presumably numpy arrays;
    TODO confirm against the class constructor), and delegates batch
    construction to ``__data_generation``.
    """
    start = item * self.batch_size
    stop = (item + 1) * self.batch_size
    batch_indexes = self.indexes[start:stop]
    batch_ids = self.ids_list[batch_indexes]
    return self.__data_generation(batch_indexes, batch_ids)
def get_file_name_from_full_path(file_path):
    """Strip any directory prefix and file extension to get the bare file name.

    :param file_path: path or file name, possibly with directories and an
        extension (e.g. ``"a/b/name.tar.gz"``)
    :return: the file name truncated at the first dot (e.g. ``"name"``)
    """
    # rpartition/partition behave like the original rfind/find logic in the
    # "no separator" case (they return the whole string / the part before
    # the first dot) without manual index arithmetic.
    name = file_path.rpartition("/")[2]
    return name.partition(".")[0]
def testSharePictureInGridViewWithYouTube(self):
    """Share a gallery grid-view item via YouTube.

    Steps: prepare a video, long-touch an item in grid view to select it,
    tap share, choose YouTube, then verify the account chooser appears.
    """
    u._clearAllResource()
    u._prepareVideo()
    # give the gallery time to pick up the prepared video
    time.sleep(2)
    # a tiny swipe acts as a long touch to enter selection mode
    d.swipe(550, 1100, 551, 1101)
    u.shareItem('YouTube')
    assert d(text='Choose an account').wait.exists(timeout=2000)
def __init__(self, tfrecord_path, batch_size=1, num_parallel_batches=None, shuffle_buffer_size=None, repeat=None, prefetch_buffer_size=1): <NEW_LINE> <INDENT> if not tfrecord_path.endswith('.tfrecord'): <NEW_LINE> <INDENT> raise ValueError('The TFRecord path must end with ".tfrecord", however ' 'the path you specified ... | Initialize the database object.
Store the initialization parameters and read in the metadata from the
metadata file. | 625941b3a79ad161976cbef5 |
def build_state(self):
    """Build the agent's current state.

    Returns the tuple ``(next_waypoint, light)``: the next waypoint from
    the planner, and the traffic-light color sensed from the environment
    at the current intersection.
    """
    # Preserve the original call order: plan first, then sense.
    heading = self.planner.next_waypoint()
    sensed = self.env.sense(self)
    return (heading, sensed['light'])
def check_syntax(arg: str) -> None:
    """Reject string values that contain commas.

    gitcher does not allow the comma character ``,`` in string values.

    :param arg: argument to check
    :raise SyntaxError: if *arg* contains a comma
    """
    if ',' not in arg:
        return
    print(MSG_ERROR + " Do not use commas (','), is an illegal char here.")
    raise SyntaxError("Do not use commas (',')")
def device_get_option_hc(self, f, Status, option): <NEW_LINE> <INDENT> if (option == DeviceOptionClass2.RW_TEMP_LIM_LOWER or option == DeviceOptionClass2.RW_TEMP_LIM_HIGHER): <NEW_LINE> <INDENT> optdata = c_float() <NEW_LINE> def get_optval(raw_optdata): return raw_optdata.value <NEW_LINE> <DEDENT> elif (option == Devi... | Get optional information from the device.
:param option: a DeviceOption enumeration
:return option: value for the option | 625941b30383005118ecf394 |
def compute_silhouette_flow(engine, pair): <NEW_LINE> <INDENT> with NamedTemporaryFile(suffix='.png') as exemplar_f, NamedTemporaryFile(suffix='.png') as shape_f: <NEW_LINE> <INDENT> base_pattern = np.dstack(( np.zeros(config.SHAPE_REND_SHAPE), *np.meshgrid( np.linspace(0, 1, config.SHAPE_REND_SHAPE[0]), np.lin... | Compute silhouette based flow. | 625941b3cc40096d61595704 |
def _helper_dup(ind_1,ind_2,ind_3,ind_4,group,name): <NEW_LINE> <INDENT> new_row=group.iloc[ind_1:ind_2,] <NEW_LINE> if (pd.notnull(new_row.iloc[0]['Add_equip'])): <NEW_LINE> <INDENT> print('Add_equip in first row is not empty, check:\n',name,'\n') <NEW_LINE> <DEDENT> if (pd.notnull(group.iloc[ind_3:ind_4,].iloc[0]['Ad... | helper function for recombine_dup_rows_into_one | 625941b35f7d997b8717484b |
def stop_generating_sentence(strings):
    """Return True if the last string in *strings* is sentence-ending punctuation.

    Used to tell a text generator when to stop producing tokens.

    :param strings: list of generated token strings
    :return: True if the last element starts with ``.``, ``!`` or ``?``,
        False otherwise (including for an empty list)
    """
    if not strings:
        return False
    # re.match anchors at the start, so any token beginning with ".",
    # "!"-runs or "?"-runs counts as terminal punctuation.
    return re.match(r"(?:\.|!+|\?+)", strings[-1]) is not None
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.