| query (string, length 1–46.9k) | pos (string, length 75–104k) | neg (list, length 12) | scores (list, length 12) |
|---|---|---|---|
Set the specified value at a YAML path.
Example:
patch_env(env, 'application/components/child/configuration/__locator.application-id', '777')
Will change the child app ID to 777
|
def patch_env(env, path, value):
    """ Set the specified value at a YAML path.
    Example:
    patch_env(env, 'application/components/child/configuration/__locator.application-id', '777')
    Will change the child app ID to 777
    """
    def pathGet(dictionary, path):
        for item in path.split("/"):
            dictionary = dictionary[item]
        return dictionary

    def pathSet(dictionary, path, value):
        path = path.split("/")
        key = path[-1]
        dictionary = pathGet(dictionary, "/".join(path[:-1]))
        dictionary[key] = value

    pathSet(env, path, value)
    return True
|
[
"def patch(self, path, value=None):\n \"\"\" Set specified value to yaml path.\n Example:\n patch('application/components/child/configuration/__locator.application-id','777')\n Will change child app ID to 777\n \"\"\"\n # noinspection PyShadowingNames\n def pathGet(dictionary, path):\n for item in path.split(\"/\"):\n dictionary = dictionary[item]\n return dictionary\n\n # noinspection PyShadowingNames\n def pathSet(dictionary, path, value):\n path = path.split(\"/\")\n key = path[-1]\n dictionary = pathGet(dictionary, \"/\".join(path[:-1]))\n dictionary[key] = value\n\n # noinspection PyShadowingNames\n def pathRm(dictionary, path):\n path = path.split(\"/\")\n key = path[-1]\n dictionary = pathGet(dictionary, \"/\".join(path[:-1]))\n del dictionary[key]\n\n src = yaml.load(self.content)\n if value:\n pathSet(src, path, value)\n else:\n pathRm(src, path)\n self._raw_content = yaml.safe_dump(src, default_flow_style=False)\n return True",
"def patch(name)\n raise ConfigurationError, \"patch_dir has not been set\" if patch_dir.nil?\n raise ConfigurationError, \"patch_dir is not a directory\" unless Dir.exist?(patch_dir)\n Patch.from_yaml File.join(patch_dir, \"#{name}.yml\")\n end",
"public PatchBuilder set(final String path, final Object value) {\n final String[] pathTokens = delimitedListToStringArray(path, PATH_DELIMITER);\n final String[] parentPathTokens = new String[pathTokens.length - 1];\n arraycopy(pathTokens, 0, parentPathTokens, 0, parentPathTokens.length);\n final Map<String, Object> parentNode = getNode(map, parentPathTokens);\n parentNode.put(pathTokens[pathTokens.length - 1], value);\n return this;\n }",
"def patch(target, value):\n \"\"\"\n Replace the specified object\n\n :param str target: A string pointing to the target to patch.\n :param object value: The value to replace the target with.\n :return: A ``Patch`` object.\n \"\"\"\n patch = current_space().patch_for(target)\n patch.set_value(value)\n return patch",
"function apply_patch(context, patch, tracker) {\n\tconst path = patch[0];\n\tif (path.length == 0) return; //ignore replace root node\n\tconst value = patch[1];\n\tconst len = path.length - 1;\n\tconst prop = path[len];\n\n\tif (!Array.isArray(value)) {\n\t\tconst node = create_node_by_path(context, path, len, tracker);\n\t\tcommit_node_prop(context, node, prop, value, tracker);\n\t\treturn;\n\t} else {\n\t\tconst patchType = value[0];\n\n\t\tif (patchType == PatchTypes.DEL) {\n\t\t\treturn apply_delete_patch(context, path, tracker);\n\t\t}\n\t\tconst node = create_node_by_path(context, path, len, tracker);\n\t\tif (patchType == PatchTypes.REFERENCE) {\n\t\t\tconst refNode = create_node_by_path(\n\t\t\t\tcontext,\n\t\t\t\tvalue[1],\n\t\t\t\tvalue[1].length,\n\t\t\t\ttracker\n\t\t\t);\n\t\t\tcommit_node_prop(context, node, prop, refNode, tracker);\n\t\t\treturn;\n\t\t}\n\n\t\tif (patchType == PatchTypes.NODE) {\n\t\t\tif (\n\t\t\t\tvalue[1] == undefined &&\n\t\t\t\tnode_type(node[prop]) == NodeTypes.NODE\n\t\t\t\t//&& node[prop]._.length == undefined\n\t\t\t) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tconst childNode = create_node(value[1]);\n\t\t\tcommit_node_prop(context, node, prop, childNode, tracker);\n\t\t\treturn;\n\t\t}\n\t}\n}",
"function deep_set(root, path, value) {\n var twig = root;\n path.split('/').forEach(function (branch, index, branches) {\n if (branch) {\n if (self.camelize) {\n branch = branch.replace(/(\\-([a-z]))/g, function (m) { return m[1].toUpperCase(); })\n }\n if (index < branches.length - 1) {\n twig = twig[branch] || (twig[branch] = {});\n } else {\n // optimistically try treating the value as JSON\n try {\n twig[branch] = JSON.parse(value);\n } catch (e) {\n twig[branch] = value;\n }\n }\n }\n });\n}",
"def patch(self, patched_value):\n \"\"\"Set a new value for the attribute of the object.\"\"\"\n try:\n if self.getter:\n setattr(self.getter_class, self.attr_name, patched_value)\n else:\n setattr(self.orig_object, self.attr_name, patched_value)\n except TypeError:\n # Workaround for patching builtin objects:\n proxy_name = 'fudge_proxy_%s_%s_%s' % (\n self.orig_object.__module__,\n self.orig_object.__name__,\n patched_value.__class__.__name__\n )\n self.proxy_object = type(proxy_name, (self.orig_object,),\n {self.attr_name: patched_value})\n mod = sys.modules[self.orig_object.__module__]\n setattr(mod, self.orig_object.__name__, self.proxy_object)",
"def patch(self, operation, path, value, custom_headers=None, timeout=-1):\n \"\"\"Uses the PATCH to update a resource.\n\n Only one operation can be performed in each PATCH call.\n\n Args\n operation: Patch operation\n path: Path\n value: Value\n timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation\n in OneView; it just stops waiting for its completion.\n custom_headers: Allows to add custom http headers.\n\n Returns:\n Updated resource.\n \"\"\"\n patch_request_body = [{'op': operation, 'path': path, 'value': value}]\n resource_uri = self.data['uri']\n\n self.data = self.patch_request(resource_uri,\n body=patch_request_body,\n custom_headers=custom_headers,\n timeout=timeout)\n return self",
"function patch(parent$, partialChange) {\n setImmediate(() => {\n if (\n partialChange === null ||\n typeof partialChange !== \"object\" ||\n typeof parent$.value !== \"object\"\n ) {\n parent$(partialChange);\n } else {\n parent$(Object.assign({}, parent$.value, partialChange));\n }\n });\n return parent$;\n}",
"def patch(subscription_id, resource_group_name, cluster_name, application_name, api_version, parameters, custom_headers:nil)\n response = patch_async(subscription_id, resource_group_name, cluster_name, application_name, api_version, parameters, custom_headers:custom_headers).value!\n response.body unless response.nil?\n end",
"def patch(self, path, secret, mount_point=DEFAULT_MOUNT_POINT):\n \"\"\"Set or update data in the KV store without overwriting.\n\n :param path: Path\n :type path: str | unicode\n :param secret: The contents of the \"secret\" dict will be stored and returned on read.\n :type secret: dict\n :param mount_point: The \"path\" the secret engine was mounted on.\n :type mount_point: str | unicode\n :return: The JSON response of the create_or_update_secret request.\n :rtype: dict\n \"\"\"\n # First, do a read.\n try:\n current_secret_version = self.read_secret_version(\n path=path,\n mount_point=mount_point,\n )\n except exceptions.InvalidPath:\n raise exceptions.InvalidPath('No value found at \"{path}\"; patch only works on existing data.'.format(path=path))\n\n # Update existing secret dict.\n patched_secret = current_secret_version['data']['data']\n patched_secret.update(secret)\n\n # Write back updated secret.\n return self.create_or_update_secret(\n path=path,\n cas=current_secret_version['data']['metadata']['version'],\n secret=patched_secret,\n mount_point=mount_point,\n )",
"def prop(name, value, recurse = false)\n @ctx.propset(name, SvnFixture.svn_prop(value), @clean_path, recurse)\n end"
] |
[
0.863051176071167,
0.725434422492981,
0.7185708284378052,
0.7039631009101868,
0.68790602684021,
0.6823700666427612,
0.6745213866233826,
0.6723222732543945,
0.6701123714447021,
0.667013943195343,
0.6660921573638916,
0.6651179194450378
] |
Extract the metadata link for a starter kit from the platform configs. Starter kits are available in the Add Component - Starter Kit menu.
Beware: the config can be changed by deploy scripts during a deploy.
:param name: Name of starter kit
:return: Link to metadata
|
def get_starter_kit_meta(name):
    """
    Extract the metadata link for a starter kit from the platform configs. Starter kits are available in the Add Component - Starter Kit menu.
    Beware: the config can be changed by deploy scripts during a deploy.
    :param name: Name of starter kit
    :return: Link to metadata
    """
    kits = yaml.safe_load(requests.get(url=starter_kits_url).content)['kits']
    kits_meta_url = [x['metaUrl'] for x in kits if x['name'] == name]
    assert len(kits_meta_url) == 1, "No component %s found in meta:\n %s" % (name, kits)
    meta = yaml.safe_load(requests.get(url=kits_meta_url[0]).content)['download_url']
    return meta
|
[
"def get_metadata(session, name):\n \"\"\"\n Gets meta data from launchpad for the given package.\n :param session: requests Session instance\n :param name: str, package\n :return: dict, meta data\n \"\"\"\n resp = session.get(\n \"https://api.launchpad.net/1.0/{}/releases\".format(name))\n if resp.status_code == 200:\n return resp.json()\n return {}",
"def metadata(self, name):\n \"\"\"Return value and metadata associated with the named value\n\n Parameters\n ----------\n name : str\n name to retrieve. If the name contains '.'s it will be retrieved recursively\n\n Raises\n ------\n KeyError\n if name is not defined in the ConfigTree\n \"\"\"\n if name in self._children:\n return self._children[name].metadata()\n else:\n head, _, tail = name.partition('.')\n if head in self._children:\n return self._children[head].metadata(key=tail)\n else:\n raise KeyError(name)",
"def info(name):\n '''\n Get information about a service on the system\n\n Args:\n name (str): The name of the service. This is not the display name. Use\n ``get_service_name`` to find the service name.\n\n Returns:\n dict: A dictionary containing information about the service.\n\n CLI Example:\n\n .. code-block:: bash\n\n salt '*' service.info spooler\n '''\n try:\n handle_scm = win32service.OpenSCManager(\n None, None, win32service.SC_MANAGER_CONNECT)\n except pywintypes.error as exc:\n raise CommandExecutionError(\n 'Failed to connect to the SCM: {0}'.format(exc.strerror))\n\n try:\n handle_svc = win32service.OpenService(\n handle_scm, name,\n win32service.SERVICE_ENUMERATE_DEPENDENTS |\n win32service.SERVICE_INTERROGATE |\n win32service.SERVICE_QUERY_CONFIG |\n win32service.SERVICE_QUERY_STATUS)\n except pywintypes.error as exc:\n raise CommandExecutionError(\n 'Failed To Open {0}: {1}'.format(name, exc.strerror))\n\n try:\n config_info = win32service.QueryServiceConfig(handle_svc)\n status_info = win32service.QueryServiceStatusEx(handle_svc)\n\n try:\n description = win32service.QueryServiceConfig2(\n handle_svc, win32service.SERVICE_CONFIG_DESCRIPTION)\n except pywintypes.error:\n description = 'Failed to get description'\n\n delayed_start = win32service.QueryServiceConfig2(\n handle_svc, win32service.SERVICE_CONFIG_DELAYED_AUTO_START_INFO)\n finally:\n win32service.CloseServiceHandle(handle_scm)\n win32service.CloseServiceHandle(handle_svc)\n\n ret = dict()\n try:\n sid = win32security.LookupAccountName(\n '', 'NT Service\\\\{0}'.format(name))[0]\n ret['sid'] = win32security.ConvertSidToStringSid(sid)\n except pywintypes.error:\n ret['sid'] = 'Failed to get SID'\n\n ret['BinaryPath'] = config_info[3]\n ret['LoadOrderGroup'] = config_info[4]\n ret['TagID'] = config_info[5]\n ret['Dependencies'] = config_info[6]\n ret['ServiceAccount'] = config_info[7]\n ret['DisplayName'] = config_info[8]\n ret['Description'] = description\n ret['Status_ServiceCode'] = status_info['ServiceSpecificExitCode']\n ret['Status_CheckPoint'] = status_info['CheckPoint']\n ret['Status_WaitHint'] = status_info['WaitHint']\n ret['StartTypeDelayed'] = delayed_start\n\n flags = list()\n for bit in SERVICE_TYPE:\n if isinstance(bit, int):\n if config_info[0] & bit:\n flags.append(SERVICE_TYPE[bit])\n\n ret['ServiceType'] = flags if flags else config_info[0]\n\n flags = list()\n for bit in SERVICE_CONTROLS:\n if status_info['ControlsAccepted'] & bit:\n flags.append(SERVICE_CONTROLS[bit])\n\n ret['ControlsAccepted'] = flags if flags else status_info['ControlsAccepted']\n\n try:\n ret['Status_ExitCode'] = SERVICE_ERRORS[status_info['Win32ExitCode']]\n except KeyError:\n ret['Status_ExitCode'] = status_info['Win32ExitCode']\n\n try:\n ret['StartType'] = SERVICE_START_TYPE[config_info[1]]\n except KeyError:\n ret['StartType'] = config_info[1]\n\n try:\n ret['ErrorControl'] = SERVICE_ERROR_CONTROL[config_info[2]]\n except KeyError:\n ret['ErrorControl'] = config_info[2]\n\n try:\n ret['Status'] = SERVICE_STATE[status_info['CurrentState']]\n except KeyError:\n ret['Status'] = status_info['CurrentState']\n\n return ret",
"def get_meta_type_by_name(name):\n data = get_default_metadata_data()\n child_data = get_child_metadata_data()\n for item in data[\"metadataObjects\"]:\n if 'xmlName' in item and item['xmlName'] == name:\n return item\n for item in child_data:\n if 'xmlName' in item and item['xmlName'] == name:\n return item\n '''\n > quick and dirty fix for users experiencing issues with \"newer\" metadata types not properly tested by mm\n > if the project has a cached .describe, let's use that to detect metadata types\n '''\n try:\n if config.describe_data != None:\n project_org_describe = config.describe_data\n if config.project != None and os.path.isfile(os.path.join(config.project.location,'config','.describe')):\n project_org_describe = parse_json_from_file(os.path.join(config.project.location,'config','.describe'))\n if project_org_describe != None and 'metadataObjects' in project_org_describe:\n for item in project_org_describe[\"metadataObjects\"]:\n if 'xmlName' in item and item['xmlName'] == name:\n return item\n except:\n pass",
"def _get_metadata_model(name=None):\n \"\"\"Find registered Metadata object.\"\"\"\n if name is not None:\n try:\n return registry[name]\n except KeyError:\n if len(registry) == 1:\n valid_names = 'Try using the name \"%s\" or simply leaving it '\\\n 'out altogether.' % list(registry)[0]\n else:\n valid_names = \"Valid names are \" + \", \".join(\n '\"%s\"' % k for k in list(registry))\n raise Exception(\n \"Metadata definition with name \\\"%s\\\" does not exist.\\n%s\" % (\n name, valid_names))\n else:\n assert len(registry) == 1, \\\n \"You must have exactly one Metadata class, if using \" \\\n \"get_metadata() without a 'name' parameter.\"\n return list(registry.values())[0]",
"def get_meta(self, name, meta_key=None):\n '''Get the ``content`` attribute of a meta tag ``name``.\n\n For example::\n\n head.get_meta('decription')\n\n returns the ``content`` attribute of the meta tag with attribute\n ``name`` equal to ``description`` or ``None``.\n If a different meta key needs to be matched, it can be specified via\n the ``meta_key`` parameter::\n\n head.get_meta('og:title', meta_key='property')\n '''\n meta_key = meta_key or 'name'\n for child in self.meta._children:\n if isinstance(child, Html) and child.attr(meta_key) == name:\n return child.attr('content')",
"def export(self, name):\n '''\n Export to the Kiwi config.xml as text.\n\n :return:\n '''\n\n self.name = name\n root = self._create_doc()\n self._set_description(root)\n self._set_preferences(root)\n self._set_repositories(root)\n self._set_users(root)\n self._set_packages(root)\n\n return '\\n'.join([line for line in minidom.parseString(\n etree.tostring(root, encoding='UTF-8', pretty_print=True)).toprettyxml(indent=\" \").split(\"\\n\")\n if line.strip()])",
"def parse_info(wininfo_name, egginfo_name):\n \"\"\"Extract metadata from filenames.\n\n Extracts the 4 metadataitems needed (name, version, pyversion, arch) from\n the installer filename and the name of the egg-info directory embedded in\n the zipfile (if any).\n\n The egginfo filename has the format::\n\n name-ver(-pyver)(-arch).egg-info\n\n The installer filename has the format::\n\n name-ver.arch(-pyver).exe\n\n Some things to note:\n\n 1. The installer filename is not definitive. An installer can be renamed\n and work perfectly well as an installer. So more reliable data should\n be used whenever possible.\n 2. The egg-info data should be preferred for the name and version, because\n these come straight from the distutils metadata, and are mandatory.\n 3. The pyver from the egg-info data should be ignored, as it is\n constructed from the version of Python used to build the installer,\n which is irrelevant - the installer filename is correct here (even to\n the point that when it's not there, any version is implied).\n 4. The architecture must be taken from the installer filename, as it is\n not included in the egg-info data.\n 5. Architecture-neutral installers still have an architecture because the\n installer format itself (being executable) is architecture-specific. We\n should therefore ignore the architecture if the content is pure-python.\n \"\"\"\n\n egginfo = None\n if egginfo_name:\n egginfo = egg_info_re.search(egginfo_name)\n if not egginfo:\n raise ValueError(\"Egg info filename %s is not valid\" % (egginfo_name,))\n\n # Parse the wininst filename\n # 1. Distribution name (up to the first '-')\n w_name, sep, rest = wininfo_name.partition('-')\n if not sep:\n raise ValueError(\"Installer filename %s is not valid\" % (wininfo_name,))\n\n # Strip '.exe'\n rest = rest[:-4]\n # 2. Python version (from the last '-', must start with 'py')\n rest2, sep, w_pyver = rest.rpartition('-')\n if sep and w_pyver.startswith('py'):\n rest = rest2\n w_pyver = w_pyver.replace('.', '')\n else:\n # Not version specific - use py2.py3. While it is possible that\n # pure-Python code is not compatible with both Python 2 and 3, there\n # is no way of knowing from the wininst format, so we assume the best\n # here (the user can always manually rename the wheel to be more\n # restrictive if needed).\n w_pyver = 'py2.py3'\n # 3. Version and architecture\n w_ver, sep, w_arch = rest.rpartition('.')\n if not sep:\n raise ValueError(\"Installer filename %s is not valid\" % (wininfo_name,))\n\n if egginfo:\n w_name = egginfo.group('name')\n w_ver = egginfo.group('ver')\n\n return {'name': w_name, 'ver': w_ver, 'arch': w_arch, 'pyver': w_pyver}",
"def get_namespace_from_name(name):\n \"\"\"\n can be either\n <namespace>/projects/<project_name>\n or\n <namespace>/<project_name>\n \"\"\"\n if not re.match(NAMESPACE_PATTERN, name):\n sys.exit((\"Argument '%s' doesn't match any recognized pattern:\\n\"\n \"\\tfloyd [data] init <project_or_dataset_name>\\n\"\n \"\\tfloyd [data] init <namespace>/<project_or_dataset_name>\\n\"\n \"\\tfloyd [data] init <namespace>/[projects|dataset]/<project_or_dataset_name>\\n\"\n \"\\n Note: Argument can only contain alphanumeric, hyphen-minus '-' , underscore '_' and dot '.' characters.\"\n ) % name)\n\n name_parts = name.split(\"/\", 2)\n if len(name_parts) > 1:\n return name_parts[0], name_parts[-1]\n else:\n return current_username(), name",
"def get(self, name, default=_MISSING):\n \"\"\"Get a metadata field.\"\"\"\n name = self._convert_name(name)\n if name not in self._fields:\n if default is _MISSING:\n default = self._default_value(name)\n return default\n if name in _UNICODEFIELDS:\n value = self._fields[name]\n return value\n elif name in _LISTFIELDS:\n value = self._fields[name]\n if value is None:\n return []\n res = []\n for val in value:\n if name not in _LISTTUPLEFIELDS:\n res.append(val)\n else:\n # That's for Project-URL\n res.append((val[0], val[1]))\n return res\n\n elif name in _ELEMENTSFIELD:\n value = self._fields[name]\n if isinstance(value, string_types):\n return value.split(',')\n return self._fields[name]",
"def get_metadata(self, namespace, name):\n \"Retrieve metadata\"\n\n if namespace in NAMESPACES:\n namespace = NAMESPACES[namespace]\n\n return self.metadata[namespace].get(name, [])",
"def from_name(api_url, name, dry_run=False):\n \"\"\"\n doesn't require a token config param\n as all of our data is currently public\n \"\"\"\n return DataSet(\n '/'.join([api_url, name]).rstrip('/'),\n token=None,\n dry_run=dry_run\n )"
] |
[
0.6562999486923218,
0.655454158782959,
0.643340528011322,
0.636264443397522,
0.6352569460868835,
0.6327133178710938,
0.6320620775222778,
0.6312291026115417,
0.6309686899185181,
0.6300292611122131,
0.6295232772827148,
0.6292681694030762
] |
Extract manifest URL from metadata URL
:param metaurl: URL to metadata
:param name: Name of application to extract
:return: Manifest URL of the application
|
def get_manifest_from_meta(metaurl, name):
    """
    Extract manifest URL from metadata URL
    :param metaurl: URL to metadata
    :param name: Name of application to extract
    :return: Manifest URL of the application
    """
    if 'http' in metaurl:
        kit = yaml.safe_load(requests.get(url=metaurl).content)['kit']['applications']
    else:
        kit = yaml.safe_load(open(metaurl).read())['kit']['applications']
    app_urls = [x['manifest'] for x in kit if x['name'] == name]
    assert len(app_urls) == 1
    return app_urls[0]
|
[
"def set_applications_from_meta(self, metadata, exclude=None):\n \"\"\"\n Parses meta and update or create each application\n :param str metadata: path or url to meta.yml\n :param list[str] exclude: List of application names, to exclude from meta.\n This might be need when you use meta as list of dependencies\n \"\"\"\n if not exclude:\n exclude = []\n if metadata.startswith('http'):\n meta = yaml.safe_load(requests.get(url=metadata).content)\n else:\n # noinspection PyArgumentEqualDefault\n meta = yaml.safe_load(open(metadata, 'r').read())\n\n applications = []\n for app in meta['kit']['applications']:\n if app['name'] not in exclude:\n applications.append({\n 'name': app['name'],\n 'url': app['manifest']})\n self.restore({'applications': applications})",
"def fetch_metadata(url, path, maxage=600):\n \"\"\"\n :param url: metadata remote location\n :param path: metdata file name\n :param maxage: if max age of existing metadata file (s) is exceeded,\n the file will be fetched from the remote location\n \"\"\"\n fetch = False\n if not os.path.isfile(path):\n fetch = True\n logger.debug(\"metadata file %s not found\", path)\n elif (os.path.getmtime(path) + maxage) < time.time():\n fetch = True\n logger.debug(\"metadata file %s from %s is more than %s s old\",\n path,\n strftime(\"%Y-%m-%d %H:%M:%S\", time.localtime(os.path.getmtime(path))),\n maxage)\n else:\n logger.debug(\"metadata file %s is less than %s s old\", path, maxage)\n if fetch:\n f=urllib.URLopener()\n try:\n f.retrieve(url, path)\n logger.debug(\"downloaded metadata from %s into %s\", url, path)\n except:\n logger.debug(\"downloaded metadata from %s failed: %s\",\n url, sys.exc_info()[0])",
"def url_to_tile(url):\n \"\"\"\n Extracts tile name, date and AWS index from tile url on AWS.\n\n :param url: class input parameter 'metafiles'\n :type url: str\n :return: Name of tile, date and AWS index which uniquely identifies tile on AWS\n :rtype: (str, str, int)\n \"\"\"\n info = url.strip('/').split('/')\n name = ''.join(info[-7: -4])\n date = '-'.join(info[-4: -1])\n return name, date, int(info[-1])",
"def get_url_metadata(self):\n \"\"\"Gets the metadata for the url.\n\n return: (osid.Metadata) - metadata for the url\n *compliance: mandatory -- This method must be implemented.*\n\n \"\"\"\n # Implemented from template for osid.resource.ResourceForm.get_group_metadata_template\n metadata = dict(self._mdata['url'])\n metadata.update({'existing_string_values': self._my_map['url']})\n return Metadata(**metadata)",
"def extract_metainfo_files_from_package(\n package,\n output_folder,\n debug=False\n ):\n \"\"\" Extracts metdata files from the given package to the given folder,\n which may be referenced in any way that is permitted in\n a requirements.txt file or install_requires=[] listing.\n\n Current supported metadata files that will be extracted:\n\n - pytoml.yml (only if package wasn't obtained as wheel)\n - METADATA\n \"\"\"\n\n if package is None:\n raise ValueError(\"package cannot be None\")\n\n if not os.path.exists(output_folder) or os.path.isfile(output_folder):\n raise ValueError(\"output folder needs to be existing folder\")\n\n # A temp folder for making a package copy in case it's a local folder,\n # because extracting metadata might modify files\n # (creating sdists/wheels...)\n temp_folder = tempfile.mkdtemp(prefix=\"pythonpackage-package-copy-\")\n try:\n # Package is indeed a folder! Get a temp copy to work on:\n if is_filesystem_path(package):\n shutil.copytree(\n parse_as_folder_reference(package),\n os.path.join(temp_folder, \"package\")\n )\n package = os.path.join(temp_folder, \"package\")\n\n # Because PEP517 can be noisy and contextlib.redirect_* fails to\n # contain it, we will run the actual analysis in a separate process:\n try:\n subprocess.check_output([\n sys.executable,\n \"-c\",\n \"import importlib\\n\"\n \"import json\\n\"\n \"import os\\n\"\n \"import sys\\n\"\n \"sys.path = [os.path.dirname(sys.argv[3])] + sys.path\\n\"\n \"m = importlib.import_module(\\n\"\n \" os.path.basename(sys.argv[3]).partition('.')[0]\\n\"\n \")\\n\"\n \"m._extract_metainfo_files_from_package_unsafe(\"\n \" sys.argv[1],\"\n \" sys.argv[2],\"\n \")\",\n package, output_folder, os.path.abspath(__file__)],\n stderr=subprocess.STDOUT, # make sure stderr is muted.\n cwd=os.path.join(os.path.dirname(__file__), \"..\")\n )\n except subprocess.CalledProcessError as e:\n output = e.output.decode(\"utf-8\", \"replace\")\n if debug:\n print(\"Got error obtaining meta info.\")\n print(\"Detail output:\")\n print(output)\n print(\"End of Detail output.\")\n raise ValueError(\n \"failed to obtain meta info - \"\n \"is '{}' a valid package? \"\n \"Detailed output:\\n{}\".format(package, output)\n )\n finally:\n shutil.rmtree(temp_folder)",
"def metadata_from_fname(self, fname):\n \"\"\"Return meta data extracted from file name.\n \n :param fname: metadata file name\n :returns: dynamically created :class:`collections.namedtuple`\n \"\"\"\n MetaData = namedtuple('MetaData', self.split_order)\n base_name = os.path.basename(fname) # e.g. 'test_S1_C2_Z3_T4.tif'\n name, suffix = base_name.split('.') # e.g. 'test_S1_C2_Z3_T4', 'tif'\n data = name.split('_')[-len(self.split_order):] # e.g. ['S1', 'C2', 'Z3', 'T4']\n args = [ int(x[1:]) for x in data ] # e.g. [1, 2, 3, 4]\n return MetaData(*args)",
"def pypi_metadata_extension(extraction_fce):\n \"\"\"Extracts data from PyPI and merges them with data from extraction\n method.\n \"\"\"\n\n def inner(self, client=None):\n data = extraction_fce(self)\n if client is None:\n logger.warning(\"Client is None, it was probably disabled\")\n data.update_attr('source0', self.archive.name)\n return data\n try:\n release_data = client.release_data(self.name, self.version)\n except BaseException:\n logger.warning(\"Some kind of error while communicating with \"\n \"client: {0}.\".format(client), exc_info=True)\n return data\n try:\n url, md5_digest = get_url(client, self.name, self.version)\n except exc.MissingUrlException:\n url, md5_digest = ('FAILED TO EXTRACT FROM PYPI',\n 'FAILED TO EXTRACT FROM PYPI')\n data_dict = {'source0': url, 'md5': md5_digest}\n\n for data_field in settings.PYPI_USABLE_DATA:\n data_dict[data_field] = release_data.get(data_field, '')\n\n # we usually get better license representation from trove classifiers\n data_dict[\"license\"] = license_from_trove(release_data.get(\n 'classifiers', ''))\n data.set_from(data_dict, update=True)\n return data\n return inner",
"def venv_metadata_extension(extraction_fce):\n \"\"\"Extracts specific metadata from virtualenv object, merges them with data\n from given extraction method.\n \"\"\"\n\n def inner(self):\n data = extraction_fce(self)\n if virtualenv is None or not self.venv:\n logger.debug(\"Skipping virtualenv metadata extraction.\")\n return data\n\n temp_dir = tempfile.mkdtemp()\n try:\n extractor = virtualenv.VirtualEnv(self.name, temp_dir,\n self.name_convertor,\n self.base_python_version)\n data.set_from(extractor.get_venv_data, update=True)\n except exc.VirtualenvFailException as e:\n logger.error(\"{}, skipping virtualenv metadata extraction.\".format(\n e))\n finally:\n shutil.rmtree(temp_dir)\n return data\n return inner",
"def _un_meta_name(self, name):\n \"\"\"\n Reverse of _meta_name\n \"\"\"\n if name.startswith('HTTP_'):\n name = name[5:]\n return name.replace('_', '-').title()",
"def _DownloadUrl(self, url, dest_dir):\n \"\"\"Download a script from a given URL.\n\n Args:\n url: string, the URL to download.\n dest_dir: string, the path to a directory for storing metadata scripts.\n\n Returns:\n string, the path to the file storing the metadata script.\n \"\"\"\n dest_file = tempfile.NamedTemporaryFile(dir=dest_dir, delete=False)\n dest_file.close()\n dest = dest_file.name\n\n self.logger.info('Downloading url from %s to %s.', url, dest)\n try:\n urlretrieve.urlretrieve(url, dest)\n return dest\n except (httpclient.HTTPException, socket.error, urlerror.URLError) as e:\n self.logger.warning('Could not download %s. %s.', url, str(e))\n except Exception as e:\n self.logger.warning('Exception downloading %s. %s.', url, str(e))\n return None",
"def _load_manifest_from_url(manifest, url, verify_certificate=True, username=None, password=None):\n \"\"\" load a url body into a manifest \"\"\"\n try:\n if username and password:\n manifest_file_handler = StringIO(lib.authenticated_get(username, password, url,\n verify=verify_certificate).decode(\"utf-8\"))\n else:\n manifest_file_handler = StringIO(lib.cleaned_request(\n 'get', url, verify=verify_certificate\n ).text)\n manifest.readfp(manifest_file_handler)\n except requests.exceptions.RequestException:\n logger.debug(\"\", exc_info=True)\n error_message = sys.exc_info()[1]\n raise ManifestException(\"There was an error retrieving {0}!\\n {1}\".format(url, str(error_message)))",
"def get_manylinux_wheel_url(self, package_name, package_version):\n \"\"\"\n For a given package name, returns a link to the download URL,\n else returns None.\n\n Related: https://github.com/Miserlou/Zappa/issues/398\n Examples here: https://gist.github.com/perrygeo/9545f94eaddec18a65fd7b56880adbae\n\n This function downloads metadata JSON of `package_name` from Pypi\n and examines if the package has a manylinux wheel. This function\n also caches the JSON file so that we don't have to poll Pypi\n every time.\n \"\"\"\n cached_pypi_info_dir = os.path.join(tempfile.gettempdir(), 'cached_pypi_info')\n if not os.path.isdir(cached_pypi_info_dir):\n os.makedirs(cached_pypi_info_dir)\n # Even though the metadata is for the package, we save it in a\n # filename that includes the package's version. This helps in\n # invalidating the cached file if the user moves to a different\n # version of the package.\n # Related: https://github.com/Miserlou/Zappa/issues/899\n json_file = '{0!s}-{1!s}.json'.format(package_name, package_version)\n json_file_path = os.path.join(cached_pypi_info_dir, json_file)\n if os.path.exists(json_file_path):\n with open(json_file_path, 'rb') as metafile:\n data = json.load(metafile)\n else:\n url = 'https://pypi.python.org/pypi/{}/json'.format(package_name)\n try:\n res = requests.get(url, timeout=float(os.environ.get('PIP_TIMEOUT', 1.5)))\n data = res.json()\n except Exception as e: # pragma: no cover\n return None\n with open(json_file_path, 'wb') as metafile:\n jsondata = json.dumps(data)\n metafile.write(bytes(jsondata, \"utf-8\"))\n\n if package_version not in data['releases']:\n return None\n\n for f in data['releases'][package_version]:\n if f['filename'].endswith(self.manylinux_wheel_file_suffix):\n return f['url']\n return None"
] |
[
0.674472451210022,
0.648524284362793,
0.6440187096595764,
0.637789785861969,
0.6317768692970276,
0.6311434507369995,
0.6276582479476929,
0.6270686984062195,
0.6214728355407715,
0.6208724975585938,
0.6203470826148987,
0.6190667748451233
] |
Function getPayloadStruct
Get the payload structure to do a creation or a modification
@param attributes: The data
@param objType: NOT USED in this class
@return RETURN: The payload structure (dict)
|
def getPayloadStruct(self, attributes, objType=None):
    """ Function getPayloadStruct
    Get the payload structure to do a creation or a modification
    @param attributes: The data
    @param objType: NOT USED in this class
    @return RETURN: The payload structure (dict)
    """
    if self.setInParentPayload:
        return {self.parentPayloadObject:
                {self.payloadObj: attributes}}
    else:
        return {self.payloadObj: attributes}
|
[
"def getPayloadStruct(self, attributes, objType):\n \"\"\" Function getPayloadStruct\n Get the payload structure to do a creation or a modification\n\n @param attribute: The data\n @param objType: SubItem type (e.g: hostgroup for hostgroup_class)\n @return RETURN: the payload\n \"\"\"\n payload = {self.payloadObj: attributes,\n objType + \"_class\":\n {self.payloadObj: attributes}}\n return payload",
"def getPayloadStruct(self, payload):\n \"\"\" Function getPayloadStruct\n\n @param payload: The payload structure to the object to add\n @return RETURN: A dict\n \"\"\"\n newSubItem = self.objType(self.api, 0, self.parentObjName,\n self.parentPayloadObj, self.parentKey, {})\n return newSubItem.getPayloadStruct(payload, self.parentPayloadObj)",
"def json_data(self, name):\n \"\"\"Get a JSON compatible structure for the named attribute\n \"\"\"\n\n # Check the write permission of the context\n # XXX: This should be done on field level by the field manager adapter\n if not self.can_write():\n raise Unauthorized(\"You are not allowed to modify this content\")\n\n # fetch the field by name\n field = api.get_field(self.context, name)\n\n # bail out if we have no field\n if not field:\n return None\n\n fieldmanager = IFieldManager(field)\n return fieldmanager.json_data(self.context)",
"def decode_struct(self, data_type, obj):\n \"\"\"\n The data_type argument must be a Struct.\n See json_compat_obj_decode() for argument descriptions.\n \"\"\"\n if obj is None and data_type.has_default():\n return data_type.get_default()\n elif not isinstance(obj, dict):\n raise bv.ValidationError('expected object, got %s' %\n bv.generic_type_name(obj))\n all_fields = data_type.definition._all_fields_\n for extra_permission in self.caller_permissions.permissions:\n all_extra_fields = '_all_{}_fields_'.format(extra_permission)\n all_fields = all_fields + getattr(data_type.definition, all_extra_fields, [])\n\n if self.strict:\n all_field_names = data_type.definition._all_field_names_\n for extra_permission in self.caller_permissions.permissions:\n all_extra_field_names = '_all_{}_field_names_'.format(extra_permission)\n all_field_names = all_field_names.union(\n getattr(data_type.definition, all_extra_field_names, {}))\n\n for key in obj:\n if (key not in all_field_names and\n not key.startswith('.tag')):\n raise bv.ValidationError(\"unknown field '%s'\" % key)\n ins = data_type.definition()\n self.decode_struct_fields(ins, all_fields, obj)\n # Check that all required fields have been set.\n data_type.validate_fields_only_with_permissions(ins, self.caller_permissions)\n return ins",
"def _getPayload(self, record):\n \"\"\"\n The data that will be sent to the RESTful API\n \"\"\"\n\n try:\n # top level payload items\n d = record.__dict__\n pid = d.pop('process', 'nopid')\n tid = d.pop('thread', 'notid')\n\n payload = {\n k: v for (k, v) in d.items()\n if k in TOP_KEYS\n }\n\n # logging meta attributes\n payload['meta'] = {\n k: v for (k, v) in d.items()\n if k in META_KEYS\n }\n\n # everything else goes in details\n payload['details'] = {\n k: simple_json(v) for (k, v) in d.items()\n if k not in self.detail_ignore_set\n }\n\n payload['log'] = payload.pop('name', 'n/a')\n payload['level'] = payload.pop('levelname', 'n/a')\n payload['meta']['line'] = payload['meta'].pop('lineno', 'n/a')\n\n payload['message'] = record.getMessage()\n tb = self._getTraceback(record)\n if tb:\n payload['traceback'] = tb\n\n except Exception as e:\n payload = {\n 'level': 'ERROR',\n 'message': 'could not format',\n 'exception': repr(e),\n }\n payload['pid'] = 'p-{}'.format(pid)\n payload['tid'] = 't-{}'.format(tid)\n return payload",
"function attrToJson(v, objectType) {\n\n var colValue;\n\n switch (objectType) {\n case \"Text\":\n colValue = v;\n break;\n case \"DateTime\":\n case \"datetime\": // For calculated columns, stored as datetime;#value\n // Dates have dashes instead of slashes: ows_Created=\"2009-08-25 14:24:48\"\n colValue = dateToJsonObject(v);\n break;\n case \"User\":\n colValue = userToJsonObject(v);\n break;\n case \"UserMulti\":\n colValue = userMultiToJsonObject(v);\n break;\n case \"Lookup\":\n colValue = lookupToJsonObject(v);\n break;\n case \"LookupMulti\":\n colValue = lookupMultiToJsonObject(v);\n break;\n case \"Boolean\":\n colValue = booleanToJsonObject(v);\n break;\n case \"Integer\":\n colValue = intToJsonObject(v);\n break;\n case \"Counter\":\n colValue = intToJsonObject(v);\n break;\n case \"MultiChoice\":\n colValue = choiceMultiToJsonObject(v);\n break;\n case \"Number\":\n case \"Currency\":\n case \"float\": // For calculated columns, stored as float;#value\n colValue = floatToJsonObject(v);\n break;\n case \"Calculated\":\n colValue = calcToJsonObject(v);\n break;\n case \"Attachments\":\n colValue = attachmentsToJsonObject(v);\n break;\n case \"URL\":\n colValue = urlToJsonObject(v);\n break;\n case \"JSON\":\n colValue = jsonToJsonObject(v); // Special case for text JSON stored in text columns\n break;\n default:\n // All other objectTypes will be simple strings\n colValue = v;\n break;\n }\n return colValue;\n }",
"def _override_payload(self, payload):\n \"\"\"\n This function transforms the payload into a new format using the\n self.override_payload property.\n \"\"\"\n if self.override_payload:\n old_payload = payload\n\n def get_value(data, key):\n try:\n parent_key, nested_key = key.split(\".\", 1)\n return get_value(data.get(parent_key, {}), nested_key)\n except ValueError:\n return data.get(key, key)\n\n def set_values(data):\n for key, value in data.items():\n if isinstance(value, dict):\n set_values(value)\n else:\n data[key] = get_value(old_payload, value)\n\n payload = deepcopy(self.override_payload)\n set_values(payload)\n\n return payload",
"def handle_create_payload(\n entity: BaseEntity,\n author_user: UserType,\n protocol_name: str,\n to_user_key: RsaKey = None,\n parent_user: UserType = None,\n) -> str:\n \"\"\"Create a payload with the given protocol.\n\n Any given user arguments must have ``private_key`` and ``handle`` attributes.\n\n :arg entity: Entity object to send. Can be a base entity or a protocol specific one.\n :arg author_user: User authoring the object.\n :arg protocol_name: Protocol to create payload for.\n :arg to_user_key: Public key of user private payload is being sent to, required for private payloads.\n :arg parent_user: (Optional) User object of the parent object, if there is one. This must be given for the\n Diaspora protocol if a parent object exists, so that a proper ``parent_author_signature`` can\n be generated. If given, the payload will be sent as this user.\n :returns: Built payload message (str)\n \"\"\"\n mappers = importlib.import_module(f\"federation.entities.{protocol_name}.mappers\")\n protocol = importlib.import_module(f\"federation.protocols.{protocol_name}.protocol\")\n protocol = protocol.Protocol()\n outbound_entity = mappers.get_outbound_entity(entity, author_user.private_key)\n if parent_user:\n outbound_entity.sign_with_parent(parent_user.private_key)\n send_as_user = parent_user if parent_user else author_user\n data = protocol.build_send(entity=outbound_entity, from_user=send_as_user, to_user_key=to_user_key)\n return data",
"def unstructure_attrs_asdict(self, obj):\n # type: (Any) -> Dict[str, Any]\n \"\"\"Our version of `attrs.asdict`, so we can call back to us.\"\"\"\n attrs = obj.__class__.__attrs_attrs__\n dispatch = self._unstructure_func.dispatch\n rv = self._dict_factory()\n for a in attrs:\n name = a.name\n v = getattr(obj, name)\n rv[name] = dispatch(v.__class__)(v)\n return rv",
"def get_single_payload(self, query_obj):\n \"\"\"Returns a payload of metadata and data\"\"\"\n payload = self.get_df_payload(query_obj)\n df = payload.get('df')\n status = payload.get('status')\n if status != utils.QueryStatus.FAILED:\n if df is not None and df.empty:\n payload['error'] = 'No data'\n else:\n payload['data'] = self.get_data(df)\n if 'df' in payload:\n del payload['df']\n return payload",
"private static JsonObject getObject(JsonObject object, String key)\n throws IOException {\n // Get the existing one\n if (object.containsKey(key)) {\n Object existing = object.get(key);\n if (!(existing instanceof JsonObject)) {\n throw new IOException(\"Invalid field structure, '\" + key +\n \"' expected to be an object, but incompatible \"\n + \"data type already present.\");\n }\n return (JsonObject) existing;\n\n // Or add a new one\n } else {\n JsonObject newObject = new JsonObject();\n object.put(key, newObject);\n return newObject;\n }\n }",
"def create_object(self, obj_type, payload, return_fields=None):\n \"\"\"Create an Infoblox object of type 'obj_type'\n\n Args:\n obj_type (str): Infoblox object type,\n e.g. 'network', 'range', etc.\n payload (dict): Payload with data to send\n return_fields (list): List of fields to be returned\n Returns:\n The object reference of the newly create object\n Raises:\n InfobloxException\n \"\"\"\n self._validate_obj_type_or_die(obj_type)\n\n query_params = self._build_query_params(return_fields=return_fields)\n\n url = self._construct_url(obj_type, query_params)\n opts = self._get_request_options(data=payload)\n self._log_request('post', url, opts)\n if(self.session.cookies):\n # the first 'get' or 'post' action will generate a cookie\n # after that, we don't need to re-authenticate\n self.session.auth = None\n r = self.session.post(url, **opts)\n\n self._validate_authorized(r)\n\n if r.status_code != requests.codes.CREATED:\n response = utils.safe_json_load(r.content)\n already_assigned = 'is assigned to another network view'\n if response and already_assigned in response.get('text'):\n exception = ib_ex.InfobloxMemberAlreadyAssigned\n else:\n exception = ib_ex.InfobloxCannotCreateObject\n raise exception(\n response=response,\n obj_type=obj_type,\n content=r.content,\n args=payload,\n code=r.status_code)\n\n return self._parse_reply(r)"
] |
[
0.8354079723358154,
0.7295766472816467,
0.6407061815261841,
0.6358272433280945,
0.6252712607383728,
0.6211220622062683,
0.6209088563919067,
0.6204599142074585,
0.6190513968467712,
0.6182016134262085,
0.6174253821372986,
0.6140912771224976
] |
Function log
Decorator to log the last request before sending a new one
@return RETURN: None
|
def log(function):
    """ Function log
    Decorator to log the last request before sending a new one
    @return RETURN: None
    """
    def _log(self, *args, **kwargs):
        ret = function(self, *args, **kwargs)
        if len(self.history) > self.maxHistory:
            self.history = self.history[1:self.maxHistory]
        self.history.append({'errorMsg': self.errorMsg,
                             'payload': self.payload,
                             'url': self.url,
                             'resp': self.resp,
                             'res': self.res,
                             'printErrors': self.printErrors,
                             'method': self.method})
        self.clearReqVars()
        return ret
    return _log
|
[
"def log_request(self, handler: RequestHandler) -> None:\n \"\"\"Writes a completed HTTP request to the logs.\n\n By default writes to the python root logger. To change\n this behavior either subclass Application and override this method,\n or pass a function in the application settings dictionary as\n ``log_function``.\n \"\"\"\n if \"log_function\" in self.settings:\n self.settings[\"log_function\"](handler)\n return\n if handler.get_status() < 400:\n log_method = access_log.info\n elif handler.get_status() < 500:\n log_method = access_log.warning\n else:\n log_method = access_log.error\n request_time = 1000.0 * handler.request.request_time()\n log_method(\n \"%d %s %.2fms\",\n handler.get_status(),\n handler._request_summary(),\n request_time,\n )",
"def log_request(self, handler):\n \"\"\"Writes a completed HTTP request to the logs.\n\n By default writes to the tinman.application LOGGER. To change\n this behavior either subclass Application and override this method,\n or pass a function in the application settings dictionary as\n 'log_function'.\n\n :param tornado.web.RequestHandler handler: The request handler\n\n \"\"\"\n if config.LOG_FUNCTION in self.settings:\n self.settings[config.LOG_FUNCTION](handler)\n return\n if handler.get_status() < 400:\n log_method = LOGGER.info\n elif handler.get_status() < 500:\n log_method = LOGGER.warning\n else:\n log_method = LOGGER.exception\n request_time = 1000.0 * handler.request.request_time()\n log_method(\"%d %s %.2fms\", handler.get_status(),\n handler._request_summary(), request_time)",
"def log_request(self, code='-', size='-'):\n # pylint: disable=unused-argument\n \"\"\"\n This function is called during :meth:`send_response`.\n\n We override it to get a little more information logged in a somewhat\n better format. We do not use the size method argument.\n \"\"\"\n self.log('%s: HTTP status %s',\n (self._get_log_prefix(), code),\n logging.INFO)",
"def log_request(_, request, *_args, **_kwargs):\n # type: (Any, ClientRequest, str, str) -> None\n \"\"\"Log a client request.\n\n :param _: Unused in current version (will be None)\n :param requests.Request request: The request object.\n \"\"\"\n if not _LOGGER.isEnabledFor(logging.DEBUG):\n return\n\n try:\n _LOGGER.debug(\"Request URL: %r\", request.url)\n _LOGGER.debug(\"Request method: %r\", request.method)\n _LOGGER.debug(\"Request headers:\")\n for header, value in request.headers.items():\n if header.lower() == 'authorization':\n value = '*****'\n _LOGGER.debug(\" %r: %r\", header, value)\n _LOGGER.debug(\"Request body:\")\n\n # We don't want to log the binary data of a file upload.\n if isinstance(request.body, types.GeneratorType):\n _LOGGER.debug(\"File upload\")\n else:\n _LOGGER.debug(str(request.body))\n except Exception as err: # pylint: disable=broad-except\n _LOGGER.debug(\"Failed to log request: %r\", err)",
"def log_to_logger(fn):\n \"\"\"\n Wrap a Bottle request so that a log line is emitted after it's handled.\n\n \"\"\"\n @wraps(fn)\n def _log_to_logger(*args, **kwargs):\n actual_response = fn(*args, **kwargs)\n # modify this to log exactly what you need:\n logger.info('%s %s %s %s' % (bottle.request.remote_addr,\n bottle.request.method,\n bottle.request.url,\n bottle.response.status))\n return actual_response\n return _log_to_logger",
"def _log_function(self, handler):\n \"\"\"Override Application.log_function so that what to log can be controlled.\n \"\"\"\n if handler.get_status() < 400:\n log_method = request_log.info\n elif handler.get_status() < 500:\n log_method = request_log.warning\n else:\n log_method = request_log.error\n for i in settings['LOGGING_IGNORE_URLS']:\n if handler.request.uri.startswith(i):\n log_method = request_log.debug\n break\n\n request_time = 1000.0 * handler.request.request_time()\n log_method(\"%d %s %.2fms\", handler.get_status(),\n handler._request_summary(), request_time)",
"def log_request(resp):\n \"\"\"Log a request.\"\"\"\n l = \"[{4}] {0} {1} {2} <{3}>\".format(request.remote_addr, request.method,\n request.url, request.endpoint,\n resp.status_code)\n c = str(resp.status_code)[0]\n if c in ['1', '2'] or resp.status_code == 304:\n app.http_logger.info(l)\n elif c == '3':\n app.http_logger.warn(l)\n else:\n app.http_logger.error(l)\n return resp",
"def log_this(cls, f):\n \"\"\"Decorator to log user actions\"\"\"\n @functools.wraps(f)\n def wrapper(*args, **kwargs):\n user_id = None\n if g.user:\n user_id = g.user.get_id()\n d = request.form.to_dict() or {}\n\n # request parameters can overwrite post body\n request_params = request.args.to_dict()\n d.update(request_params)\n d.update(kwargs)\n\n slice_id = d.get('slice_id')\n dashboard_id = d.get('dashboard_id')\n\n try:\n slice_id = int(\n slice_id or json.loads(d.get('form_data')).get('slice_id'))\n except (ValueError, TypeError):\n slice_id = 0\n\n stats_logger.incr(f.__name__)\n start_dttm = datetime.now()\n value = f(*args, **kwargs)\n duration_ms = (datetime.now() - start_dttm).total_seconds() * 1000\n\n # bulk insert\n try:\n explode_by = d.get('explode')\n records = json.loads(d.get(explode_by))\n except Exception:\n records = [d]\n\n referrer = request.referrer[:1000] if request.referrer else None\n logs = []\n for record in records:\n try:\n json_string = json.dumps(record)\n except Exception:\n json_string = None\n log = cls(\n action=f.__name__,\n json=json_string,\n dashboard_id=dashboard_id,\n slice_id=slice_id,\n duration_ms=duration_ms,\n referrer=referrer,\n user_id=user_id)\n logs.append(log)\n\n sesh = db.session()\n sesh.bulk_save_objects(logs)\n sesh.commit()\n return value\n\n return wrapper",
"def log_request(self, code='-', size='-'):\n \"\"\"Logs the current request.\"\"\"\n print_size = getattr(thread_local, 'size', -1)\n if size != '-':\n size_str = ' (%s)' % size\n elif print_size >= 0:\n size_str = self.log_size_string(print_size) + ' '\n else:\n size_str = ''\n if not self.server.suppress_noise or (code != 200 and code != 304):\n self.log_message(\n '%s\"%s\" %s', size_str, self.requestline, str(code))\n if print_size >= 0:\n thread_local.size = -1",
"def log_request(request: str, trim_log_values: bool = False, **kwargs: Any) -> None:\n \"\"\"Log a request\"\"\"\n return log_(request, request_logger, logging.INFO, trim=trim_log_values, **kwargs)",
"def log_call(call_name):\n \"\"\"Log the API call to the logger.\"\"\"\n def decorator(f):\n @wraps(f)\n def wrapper(*args, **kw):\n instance = args[0]\n instance.logger.info(call_name, {\"content\": request.get_json()})\n return f(*args, **kw)\n return wrapper\n return decorator",
"def log_request_success(self, method, full_url, path, body, status_code, response, duration):\n \"\"\" Log a successful API call. \"\"\"\n # TODO: optionally pass in params instead of full_url and do urlencode only when needed\n\n # body has already been serialized to utf-8, deserialize it for logging\n # TODO: find a better way to avoid (de)encoding the body back and forth\n if body:\n try:\n body = body.decode('utf-8', 'ignore')\n except AttributeError:\n pass\n\n logger.info(\n '%s %s [status:%s request:%.3fs]', method, full_url,\n status_code, duration\n )\n logger.debug('> %s', body)\n logger.debug('< %s', response)\n\n self._log_trace(method, path, body, status_code, response, duration)"
] |
[
0.722433865070343,
0.7216631770133972,
0.7147947549819946,
0.7131589651107788,
0.7096238136291504,
0.7083010673522949,
0.7022097110748291,
0.7021543979644775,
0.7012953162193298,
0.699263334274292,
0.6988188028335571,
0.6917266249656677
] |
Function clearReqVars
Clear the per-request variables that are stored in the history
@return RETURN: None
|
def clearReqVars(self):
    """ Function clearReqVars
    Clear the per-request variables that are stored in the history
    @return RETURN: None
    """
    self.errorMsg = None
    self.payload = None
    self.url = None
    self.resp = None
    self.res = None
    self.method = None
    self.printErrors = None
|
[
"public function clear()\n {\n $shmid = shm_attach($this->sysvKey, $this->shmSize, $this->perm);\n shm_remove_var($shmid, self::VAR_KEY);\n }",
"public function clear()\n\t{\n\t\t$cvars = get_class_vars(__CLASS__);\n\n\t\t$this->_updatedkeys = array();\n\n\t\tforeach ($cvars as $key => $value)\n\t\t{\n\t\t\tif ($key{0} != '_')\n\t\t\t{\n\t\t\t\tunset($this->$key);\n\n\t\t\t\t$this->$key = null;\n\t\t\t}\n\t\t}\n\n\t\t$this->_updatedkeys = array();\n\t}",
"protected function reset() \n\t{\n\t //NEVER TRUST USER INPUT\n\t if (function_exists('filter_var'))\t// Adjustment for hoster without the filter extension\n\t {\n\t \t$this->_http_referer = isset($_SERVER['HTTP_REFERER']) ? filter_var($_SERVER['HTTP_REFERER'], FILTER_SANITIZE_URL) : self::REFERER_UNKNOWN ;\n\t } \n\t else \n\t {\n\t \t$this->_http_referer = isset($_SERVER['HTTP_REFERER']) ? $_SERVER['HTTP_REFERER'] : self::REFERER_UNKNOWN ;\n\t }\n\t \n\t $this->_search_engine = self::SEARCH_ENGINE_UNKNOWN ;\n\t $this->_keywords = self::KEYWORDS_UNKNOWN ;\n\t}",
"def _clear_xauth(self):\n '''\n Clear the Xauthority file and restore the environment variables.\n '''\n os.remove(self._xauth_filename)\n for varname in ['AUTHFILE', 'XAUTHORITY']:\n if self._old_xauth[varname] is None:\n del os.environ[varname]\n else:\n os.environ[varname] = self._old_xauth[varname]\n self._old_xauth = None",
"def clear_to_reset(self, config_vars):\n \"\"\"Clear all volatile information across a reset.\n\n The reset behavior is that:\n - uptime is reset to 0\n - is `has_rtc` is True, the utc_offset is preserved\n - otherwise the utc_offset is cleared to none\n \"\"\"\n\n super(ClockManagerSubsystem, self).clear_to_reset(config_vars)\n\n self.tick_counters = dict(fast=0, user1=0, user2=0, normal=0)\n\n self.is_utc = False\n self.time_offset = 0\n\n if self.has_rtc and self.stored_offset is not None:\n self.time_offset = self.stored_offset + self.uptime\n\n self.uptime = 0\n\n self.ticks['fast'] = config_vars.get('fast_tick', 0)\n self.ticks['user1'] = config_vars.get('user_tick_1', 0)\n self.ticks['user2'] = config_vars.get('user_tick_2', 0)",
"def clear_history(pymux, variables):\n \" Clear scrollback buffer. \"\n pane = pymux.arrangement.get_active_pane()\n\n if pane.display_scroll_buffer:\n raise CommandException('Not available in copy mode')\n else:\n pane.process.screen.clear_history()",
"protected function reset()\n\t{\n\t //NEVER TRUST USER INPUT\n\t if (function_exists('filter_var'))\t// Adjustment for hoster without the filter extension\n\t {\n\t $this->_http_referrer = isset($_SERVER['HTTP_REFERER']) ? filter_var($_SERVER['HTTP_REFERER'], FILTER_SANITIZE_URL) : self::REFERRER_UNKNOWN ;\n\t }\n\t else\n\t {\n\t $this->_http_referrer = isset($_SERVER['HTTP_REFERER']) ? $_SERVER['HTTP_REFERER'] : self::REFERRER_UNKNOWN ;\n\t }\n\t $this->_referrer_DNS = self::REFERRER_UNKNOWN;\n\t if ($this->_http_referrer == '' ||\n $this->_http_referrer == '-')\n\t {\n\t //ungueltiger Referrer\n\t $this->_referrer_DNS = self::REFERRER_WRONG;\n\t }\n\t}",
"def ClearAllVar(self):\n \"\"\"Clear this Value.\"\"\"\n self.value = None\n # Call OnClearAllVar on options.\n _ = [option.OnClearAllVar() for option in self.options]",
"private function resetVars()\n {\n $this->response = $this->calls = [];\n $this->hasCalls = $this->isBatchCall = false;\n }",
"public function clear()\n\t{\n\t\t$cvars = get_class_vars(__CLASS__);\n\n\t\t$this->_updatedkeys = array();\n\n\t\tforeach ($cvars as $key => $value)\n\t\t{\n\t\t\tif ($key{0} != '_')\n\t\t\t{\n\t\t\t\tunset($this->$key);\n\n\t\t\t\tif (!in_array($key, self::$_list_keys))\n\t\t\t\t{\n\t\t\t\t\t$this->$key = null;\n\t\t\t\t}\n\t\t\t\telse\n\t\t\t\t{\n\t\t\t\t\t$this->$key = array();\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\n\t\t$this->_updatedkeys = array();\n\n\t\treturn true;\n\t}",
"public function resetStaticVariables()\n {\n \\oxArticleHelper::cleanup();\n \\oxSeoEncoderHelper::cleanup();\n \\oxDeliveryHelper::cleanup();\n \\oxManufacturerHelper::cleanup();\n \\oxAdminViewHelper::cleanup();\n \\oxVendorHelper::cleanup();\n }",
"public function clearHistory()\n {\n $this->historyLines = array();\n $this->historyPosition = null;\n\n if ($this->historyUnsaved !== null) {\n $this->setInput($this->historyUnsaved);\n $this->historyUnsaved = null;\n }\n\n return $this;\n }"
] |
[
0.6837462782859802,
0.6633564233779907,
0.6587758660316467,
0.6558899879455566,
0.6457203030586243,
0.6425361633300781,
0.6407960057258606,
0.6398618817329407,
0.639014482498169,
0.6376631259918213,
0.6365300416946411,
0.6364492774009705
] |
Function list
Get the list of an object
@param obj: object name ('hosts', 'puppetclasses'...)
@param filter: filter for objects
@param only_id: boolean to only return dict with name/id
@param limit: maximum number of results returned per page
@return RETURN: the list of the object
|
def list(self, obj, filter=False, only_id=False, limit=20):
    """ Function list
    Get the list of an object
    @param obj: object name ('hosts', 'puppetclasses'...)
    @param filter: filter for objects
    @param only_id: boolean to only return dict with name/id
    @param limit: maximum number of results returned per page
    @return RETURN: the list of the object
    """
    self.url = '{}{}/?per_page={}'.format(self.base_url, obj, limit)
    self.method = 'GET'
    if filter:
        self.url += '&search={}'.format(filter)
    self.resp = requests.get(url=self.url, auth=self.auth,
                             headers=self.headers, cert=self.ca_cert)
    if only_id:
        if self.__process_resp__(obj) is False:
            return False
        if type(self.res['results']) is list:
            return dict((x['name'], x['id']) for x in self.res['results'])
        elif type(self.res['results']) is dict:
            r = {}
            for v in self.res['results'].values():
                for vv in v:
                    r[vv['name']] = vv['id']
            return r
        else:
            return False
    else:
        return self.__process_resp__(obj)
|
[
"def apply_filter_list(func, obj):\n \"\"\"Apply `func` to list or tuple `obj` element-wise and directly otherwise.\"\"\"\n if isinstance(obj, (list, tuple)):\n return [func(item) for item in obj]\n return func(obj)",
"def _where(self, filter_fn):\n ''' use this to filter VLists, simply provide a filter function to filter the current found objects '''\n assert callable(filter_fn), 'filter_fn needs to be callable'\n return VList(i for i in self if filter_fn(i()))",
"def get_id_by_name(self, obj, name):\n \"\"\" Function get_id_by_name\n Get the id of an object\n\n @param obj: object name ('hosts', 'puppetclasses'...)\n @param id: the id of the object (name or id)\n @return RETURN: the targeted object\n \"\"\"\n list = self.list(obj, filter='name = \"{}\"'.format(name),\n only_id=True, limit=1)\n return list[name] if name in list.keys() else False",
"function(obj, fn) {\n var _keys, filtered;\n filtered = objelity.filterObject(obj, fn);\n _keys = objelity.deepKeys(filtered);\n if (_keys.length <= 0) {\n return {};\n } else {\n return _.get(filtered, _keys[0]);\n }\n }",
"def where(self, relation, filter_fn):\n ''' use this to filter VLists, simply provide a filter function and what relation to apply it to '''\n assert type(relation).__name__ in {'str','unicode'}, 'where needs the first arg to be a string'\n assert callable(filter_fn), 'filter_fn needs to be callable'\n return VList(i for i in self if relation in i._relations() and any(filter_fn(_()) for _ in i[relation]))",
"def get(self, obj, id, sub_object=None):\n \"\"\" Function get\n Get an object by id\n\n @param obj: object name ('hosts', 'puppetclasses'...)\n @param id: the id of the object (name or id)\n @return RETURN: the targeted object\n \"\"\"\n self.url = '{}{}/{}'.format(self.base_url, obj, id)\n self.method = 'GET'\n if sub_object:\n self.url += '/' + sub_object\n self.resp = requests.get(url=self.url, auth=self.auth,\n headers=self.headers, cert=self.ca_cert)\n if self.__process_resp__(obj):\n return self.res\n return False",
"def customFilter(self, filterFunc):\n '''\n customFilter - Apply a custom filter to elements and return a QueryableList of matches\n\n @param filterFunc <lambda/function< - A lambda/function that is passed an item, and\n returns True if the item matches (will be returned), otherwise False.\n\n @return - A QueryableList object of the same type, with only the matching objects returned.\n '''\n ret = self.__class__()\n for item in self:\n if filterFunc(item):\n ret.append(item)\n\n return ret",
"def where(cls, *filters, **keyword_filters):\n \"\"\"Retrieves objects (Samples, Classifications, etc.) from the One Codex server.\n\n Parameters\n ----------\n filters : objects\n Advanced filters to use (not implemented)\n sort : string | list, optional\n Sort the results by this field (or list of fields). By default in descending order,\n but if any of the fields start with the special character ^, sort in ascending order.\n For example, sort=['size', '^filename'] will sort by size from largest to smallest and\n filename from A-Z for items with the same size.\n limit : integer, optional\n Number of records to return. For smaller searches, this can reduce the number of\n network requests made.\n keyword_filters : strings | objects\n Filter the results by specific keywords (or filter objects, in advanced usage)\n\n Examples\n --------\n You can filter objects that are returned locally using a lambda function:\n\n # returns only samples with a filename ending in '.gz'\n my_samples = Samples.where(filter=lambda s: s.filename.endswith('.gz'))\n\n Returns\n -------\n list\n A list of all objects matching these filters. If no filters are passed, this\n matches all objects.\n \"\"\"\n check_bind(cls)\n\n # do this here to avoid passing this on to potion\n filter_func = keyword_filters.pop(\"filter\", None)\n\n public = False\n if any(x[\"rel\"] == \"instances_public\" for x in cls._resource._schema[\"links\"]):\n public = keyword_filters.pop(\"public\", False)\n\n instances_route = keyword_filters.pop(\n \"_instances\", \"instances\" if not public else \"instances_public\"\n )\n\n schema = next(l for l in cls._resource._schema[\"links\"] if l[\"rel\"] == instances_route)\n sort_schema = schema[\"schema\"][\"properties\"][\"sort\"][\"properties\"]\n where_schema = schema[\"schema\"][\"properties\"][\"where\"][\"properties\"]\n\n sort = generate_potion_sort_clause(keyword_filters.pop(\"sort\", None), sort_schema)\n limit = keyword_filters.pop(\"limit\", None if not public else 1000)\n where = {}\n\n # we're filtering by fancy objects (like SQLAlchemy's filter)\n if len(filters) > 0:\n if len(filters) == 1 and isinstance(filters[0], dict):\n where = filters[0]\n elif all(isinstance(f, six.string_types) for f in filters):\n # if it's a list of strings, treat it as an multiple \"get\" request\n where = {\"$uri\": {\"$in\": [cls._convert_id_to_uri(f) for f in filters]}}\n else:\n # we're doing some more advanced filtering\n raise NotImplementedError(\"Advanced filtering hasn't been implemented yet\")\n\n # we're filtering by keyword arguments (like SQLAlchemy's filter_by)\n if len(keyword_filters) > 0:\n for k, v in generate_potion_keyword_where(keyword_filters, where_schema, cls).items():\n if k in where:\n raise AttributeError(\"Multiple definitions for same field {}\".format(k))\n where[k] = v\n\n # the potion-client method returns an iterator (which lazily fetchs the records\n # using `per_page` instances per request) so for limiting we only want to fetch the first\n # n (and not instantiate all the available which is what would happen if we just sliced)\n cursor = getattr(cls._resource, instances_route)(\n where=where, sort=sort, per_page=DEFAULT_PAGE_SIZE\n )\n if limit is not None:\n cursor = itertools.islice(cursor, limit)\n\n # finally, apply local filtering function on objects before returning\n wrapped = [cls(_resource=r) for r in cursor]\n\n if filter_func:\n if callable(filter_func):\n wrapped = [obj for obj in wrapped if filter_func(obj) is True]\n else:\n raise OneCodexException(\n 
\"Expected callable for filter, got: {}\".format(type(filter_func).__name__)\n )\n\n return wrapped",
"def complex_filter(self, filter_obj):\n \"\"\"\n Returns a new QuerySet instance with filter_obj added to the filters.\n\n filter_obj can be a Q object (or anything with an add_to_query()\n method) or a dictionary of keyword lookup arguments.\n\n This exists to support framework features such as 'limit_choices_to',\n and usually it will be more natural to use other methods.\n \"\"\"\n if isinstance(filter_obj, Filter):\n clone = self._clone()\n clone._filters.add(filter_obj)\n return clone\n return self._filter_or_exclude(None, **filter_obj)",
"def simple_getter(queryset, object_regex=None, lookup_field=None):\n ''' Returns simple object_getter function for use with PluggableSite.\n It takes 'queryset' with QuerySet or Model instance, 'object_regex' with\n url regex and 'lookup_field' with lookup field.\n '''\n object_regex = object_regex or r'\\d+'\n lookup_field = lookup_field or 'pk'\n\n if isinstance(queryset, models.Model):\n qs = queryset._default_manager.all()\n elif isinstance(queryset, QuerySet) or isinstance(queryset, models.Manager):\n qs = queryset\n\n def object_getter(object_id):\n return qs.get(**{lookup_field: object_id})\n object_getter.regex = \"(?P<object_id>%s)\" % object_regex\n\n return object_getter",
"def load(self, limit=9999):\n \"\"\" Function list\n Get the list of all interfaces\n\n @param key: The targeted object\n @param limit: The limit of items to return\n @return RETURN: A ForemanItem list\n \"\"\"\n subItemList = self.api.list('{}/{}/{}'.format(self.parentObjName,\n self.parentKey,\n self.objName,\n ),\n limit=limit)\n if self.objName == 'puppetclass_ids':\n subItemList = list(map(lambda x: {'id': x}, subItemList))\n if self.objName == 'puppetclasses':\n sil_tmp = subItemList.values()\n subItemList = []\n for i in sil_tmp:\n subItemList.extend(i)\n return {x[self.index]: self.objType(self.api, x['id'],\n self.parentObjName,\n self.parentPayloadObj,\n self.parentKey,\n x)\n for x in subItemList}",
"def get(self, id_filter):\n \"\"\"Get filter by id.\n\n :param id_filter: Identifier of the Filter. Integer value and greater than zero.\n\n :return: Following dictionary:\n\n ::\n\n {‘filter’: {‘id’: < id >,\n ‘name’: < name >,\n ‘description’: < description >}}\n\n :raise InvalidParameterError: The value of id_filter is invalid.\n :raise FilterNotFoundError: Filter not registered.\n :raise DataBaseError: Networkapi failed to access the database.\n :raise XMLError: Networkapi failed to generate the XML response.\n \"\"\"\n url = 'filter/get/' + str(id_filter) + '/'\n\n code, xml = self.submit(None, 'GET', url)\n\n return self.response(code, xml)"
] |
[
0.6781755685806274,
0.6688603162765503,
0.6688332557678223,
0.6551244854927063,
0.6471869349479675,
0.6464616060256958,
0.6460150480270386,
0.6420566439628601,
0.6405892968177795,
0.6329345703125,
0.6318852305412292,
0.6314055323600769
] |
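The only_id branch of list() above has to normalise two shapes of Foreman's 'results' payload: a flat list of objects (hosts, domains, ...) or a dict of lists (as returned for puppetclasses). A standalone sketch of that normalisation with made-up sample data; the helper name is ours and not part of the client:

def results_to_ids(results):
    """Collapse a Foreman-style 'results' payload into a {name: id} dict."""
    if isinstance(results, list):
        return dict((item['name'], item['id']) for item in results)
    if isinstance(results, dict):
        mapping = {}
        for group in results.values():
            for item in group:
                mapping[item['name']] = item['id']
        return mapping
    return False

# flat list, e.g. the results of listing 'hosts'
print(results_to_ids([{'name': 'web01', 'id': 3}, {'name': 'db01', 'id': 7}]))
# dict of lists, e.g. grouped puppetclasses
print(results_to_ids({'apache': [{'name': 'apache::mod::ssl', 'id': 12}]}))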
Function get
Get an object by id
@param obj: object name ('hosts', 'puppetclasses'...)
@param id: the id of the object (name or id)
@return RETURN: the targeted object
|
def get(self, obj, id, sub_object=None):
""" Function get
Get an object by id
@param obj: object name ('hosts', 'puppetclasses'...)
@param id: the id of the object (name or id)
@return RETURN: the targeted object
"""
self.url = '{}{}/{}'.format(self.base_url, obj, id)
self.method = 'GET'
if sub_object:
self.url += '/' + sub_object
self.resp = requests.get(url=self.url, auth=self.auth,
headers=self.headers, cert=self.ca_cert)
if self.__process_resp__(obj):
return self.res
return False
|
[
"def get_id_by_name(self, obj, name):\n \"\"\" Function get_id_by_name\n Get the id of an object\n\n @param obj: object name ('hosts', 'puppetclasses'...)\n @param id: the id of the object (name or id)\n @return RETURN: the targeted object\n \"\"\"\n list = self.list(obj, filter='name = \"{}\"'.format(name),\n only_id=True, limit=1)\n return list[name] if name in list.keys() else False",
"def delete(self, obj, id):\n \"\"\" Function delete\n Delete an object by id\n\n @param obj: object name ('hosts', 'puppetclasses'...)\n @param id: the id of the object (name or id)\n @return RETURN: the server response\n \"\"\"\n self.url = '{}{}/{}'.format(self.base_url, obj, id)\n self.method = 'DELETE'\n self.resp = requests.delete(url=self.url,\n auth=self.auth,\n headers=self.headers, cert=self.ca_cert)\n return self.__process_resp__(obj)",
"def get(cls, id):\n ''' Retrieves an object by id. Returns None in case of failure '''\n\n if not id:\n return None\n\n redis = cls.get_redis()\n key = '{}:{}:obj'.format(cls.cls_key(), id)\n\n if not redis.exists(key):\n return None\n\n obj = cls(id=id)\n obj._persisted = True\n\n data = debyte_hash(redis.hgetall(key))\n\n for fieldname, field in obj.proxy:\n value = field.recover(data, redis)\n\n setattr(\n obj,\n fieldname,\n value\n )\n\n return obj",
"def get_object(table, id=None, condition=None, cache=False, fields=None, use_local=False,\n engine_name=None, session=None):\n \"\"\"\n Get obj in Local.object_caches first and also use get(cache=True) function if \n not found in object_caches\n \"\"\"\n from uliweb import functions, settings\n \n model = get_model(table, engine_name)\n \n #if id is an object of Model, so get the real id value\n if isinstance(id, Model):\n return id\n \n if cache:\n if use_local:\n s = get_session(session)\n key = get_object_id(s.engine_name, model.tablename, id)\n value = s.get_local_cache(key)\n if value:\n return value\n obj = model.get(id, condition=condition, fields=fields, cache=True)\n if use_local:\n value = s.get_local_cache(key, obj)\n else:\n obj = model.get(id, condition=condition, fields=fields)\n \n return obj",
"private Object get(Object obj, XmlParser.Node node) throws NoSuchMethodException,\n ClassNotFoundException, InvocationTargetException, IllegalAccessException\n {\n Class oClass = nodeClass(node);\n if (oClass != null)\n obj = null;\n else\n oClass = obj.getClass();\n\n String name = node.getAttribute(\"name\");\n String id = node.getAttribute(\"id\");\n if (log.isDebugEnabled()) log.debug(\"get \" + name);\n\n try\n {\n // try calling a getXxx method.\n Method method = oClass.getMethod(\"get\" + name.substring(0, 1).toUpperCase()\n + name.substring(1), (java.lang.Class[]) null);\n obj = method.invoke(obj, (java.lang.Object[]) null);\n configure(obj, node, 0);\n }\n catch (NoSuchMethodException nsme)\n {\n try\n {\n Field field = oClass.getField(name);\n obj = field.get(obj);\n configure(obj, node, 0);\n }\n catch (NoSuchFieldException nsfe)\n {\n throw nsme;\n }\n }\n if (id != null) _idMap.put(id, obj);\n return obj;\n }",
"def get(self, obj_id):\n \"\"\"\n Get a single item\n\n :param obj_id: int\n :return: dict|str\n \"\"\"\n response = self._client.session.get(\n '{url}/{id}'.format(\n url=self.endpoint_url, id=obj_id\n )\n )\n return self.process_response(response)",
"def get_object_id_by_params(obj, params=None, **kwargs):\n '''\n .. versionadded:: 2017.7\n\n Get ID of single Zabbix object specified by its name.\n\n :param obj: Zabbix object type\n :param params: Parameters by which object is uniquely identified\n :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring)\n :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring)\n :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring)\n\n :return: object ID\n '''\n if params is None:\n params = {}\n res = run_query(obj + '.get', params, **kwargs)\n if res and len(res) == 1:\n return six.text_type(res[0][ZABBIX_ID_MAPPER[obj]])\n else:\n raise SaltException('Zabbix API: Object does not exist or bad Zabbix user permissions or other unexpected '\n 'result. Called method {0} with params {1}. '\n 'Result: {2}'.format(obj + '.get', params, res))",
"function (soajs, id) {\n let id1;\n try {\n id1 = soajs.mongoDb.ObjectId(id.toString());\n return id1;\n }\n catch (e) {\n soajs.log.error(e);\n throw e;\n }\n }",
"def _get_object(objname, objtype):\n '''\n Helper function to retrieve objtype from pillars if objname\n is string_types, used for SupportedLoginProviders and\n OpenIdConnectProviderARNs.\n '''\n ret = None\n if objname is None:\n return ret\n\n if isinstance(objname, string_types):\n if objname in __opts__:\n ret = __opts__[objname]\n master_opts = __pillar__.get('master', {})\n if objname in master_opts:\n ret = master_opts[objname]\n if objname in __pillar__:\n ret = __pillar__[objname]\n elif isinstance(objname, objtype):\n ret = objname\n\n if not isinstance(ret, objtype):\n ret = None\n\n return ret",
"def get(self, object_id, cls=None):\n \"\"\"\n Retrieve an object by its object_id\n\n :param: object_id: the objects id.\n :param: cls: the objects class, if None use the default class from the datamanager\n :return: the requested object or None if not found\n \"\"\"\n cls = self.cls if cls is None else cls\n return self.session.query(cls).get(object_id)",
"def get_endpoint_obj(client, endpoint, object_id):\n ''' Tiny helper function that gets used all over the place to join the object ID to the endpoint and run a GET request, returning the result '''\n endpoint = '/'.join([endpoint, str(object_id)])\n return client.authenticated_request(endpoint).json()",
"def get(self, id):\n \"\"\"\n Get an by object by unique identifier\n\n :id string id: the bson id of an object\n :rtype: JSON\n \"\"\"\n try:\n if self.request.headers.get(\"Id\"):\n object_ = yield self.client.find_one({self.request.headers.get(\"Id\"): id})\n else:\n object_ = yield self.client.find_one_by_id(id)\n\n if object_:\n self.write(object_)\n return\n\n self.raise_error(404, \"%s/%s not found\" % (self.object_name, id))\n\n except InvalidId as ex:\n self.raise_error(400, message=\"Your ID is malformed: %s\" % id)\n except Exception as ex:\n self.logger.error(ex)\n self.raise_error()"
] |
[
0.785792887210846,
0.7371777892112732,
0.7038533091545105,
0.6925148367881775,
0.691858172416687,
0.6908140182495117,
0.6882432699203491,
0.6878669857978821,
0.686981201171875,
0.6845094561576843,
0.6832347512245178,
0.6818809509277344
] |
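get() only varies the URL it builds; the request itself uses the same auth/headers/cert pattern as list(). A tiny self-contained sketch of the URL composition, with a made-up base_url:

base_url = 'https://foreman.example.com/api/v2/'   # made-up endpoint

def build_get_url(obj, id, sub_object=None):
    # mirrors the URL logic of get() above
    url = '{}{}/{}'.format(base_url, obj, id)
    if sub_object:
        url += '/' + sub_object
    return url

print(build_get_url('hosts', 42))                     # .../hosts/42
print(build_get_url('hosts', 'web01', 'interfaces'))  # .../hosts/web01/interfaces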
Function get_id_by_name
Get the id of an object
@param obj: object name ('hosts', 'puppetclasses'...)
@param name: the name of the object
@return RETURN: the id of the object, or False if not found
|
def get_id_by_name(self, obj, name):
""" Function get_id_by_name
Get the id of an object
@param obj: object name ('hosts', 'puppetclasses'...)
    @param name: the name of the object
    @return RETURN: the id of the object, or False if not found
"""
list = self.list(obj, filter='name = "{}"'.format(name),
only_id=True, limit=1)
return list[name] if name in list.keys() else False
|
[
"def get(self, obj, id, sub_object=None):\n \"\"\" Function get\n Get an object by id\n\n @param obj: object name ('hosts', 'puppetclasses'...)\n @param id: the id of the object (name or id)\n @return RETURN: the targeted object\n \"\"\"\n self.url = '{}{}/{}'.format(self.base_url, obj, id)\n self.method = 'GET'\n if sub_object:\n self.url += '/' + sub_object\n self.resp = requests.get(url=self.url, auth=self.auth,\n headers=self.headers, cert=self.ca_cert)\n if self.__process_resp__(obj):\n return self.res\n return False",
"def get_object_id_by_params(obj, params=None, **kwargs):\n '''\n .. versionadded:: 2017.7\n\n Get ID of single Zabbix object specified by its name.\n\n :param obj: Zabbix object type\n :param params: Parameters by which object is uniquely identified\n :param _connection_user: Optional - zabbix user (can also be set in opts or pillar, see module's docstring)\n :param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see module's docstring)\n :param _connection_url: Optional - url of zabbix frontend (can also be set in opts, pillar, see module's docstring)\n\n :return: object ID\n '''\n if params is None:\n params = {}\n res = run_query(obj + '.get', params, **kwargs)\n if res and len(res) == 1:\n return six.text_type(res[0][ZABBIX_ID_MAPPER[obj]])\n else:\n raise SaltException('Zabbix API: Object does not exist or bad Zabbix user permissions or other unexpected '\n 'result. Called method {0} with params {1}. '\n 'Result: {2}'.format(obj + '.get', params, res))",
"def by_id(cls, oid):\n \"\"\"Find a model object by its ``ObjectId``,\n ``oid`` can be string or ObjectId\"\"\"\n if oid:\n d = cls.collection.find_one(ObjectId(oid))\n if d:\n return cls(**d)",
"def find_item_by_id(self, object_id):\n \"\"\"Get item based on its id or uuid\n\n :param object_id:\n :type object_id: int | str\n :return:\n :rtype: alignak.objects.item.Item | None\n \"\"\"\n # Item id may be an item\n if isinstance(object_id, Item):\n return object_id\n\n # Item id should be a uuid string\n if not isinstance(object_id, string_types):\n logger.debug(\"Find an item by id, object_id is not int nor string: %s\", object_id)\n return object_id\n\n for items in [self.hosts, self.services, self.actions, self.checks, self.hostgroups,\n self.servicegroups, self.contacts, self.contactgroups]:\n if object_id in items:\n return items[object_id]\n\n # raise AttributeError(\"Item with id %s not found\" % object_id) # pragma: no cover,\n logger.error(\"Item with id %s not found\", str(object_id)) # pragma: no cover,\n return None",
"def get_by_object(cls, pid_type, object_type, object_uuid):\n \"\"\"Get a persistent identifier for a given object.\n\n :param pid_type: Persistent identifier type.\n :param object_type: The object type is a string that identify its type.\n :param object_uuid: The object UUID.\n :raises invenio_pidstore.errors.PIDDoesNotExistError: If no PID is\n found.\n :returns: A :class:`invenio_pidstore.models.PersistentIdentifier`\n instance.\n \"\"\"\n try:\n return cls.query.filter_by(\n pid_type=pid_type,\n object_type=object_type,\n object_uuid=object_uuid\n ).one()\n except NoResultFound:\n raise PIDDoesNotExistError(pid_type, None)",
"def get_id(id_or_obj):\n \"\"\"\n Returns the 'id' attribute of 'id_or_obj' if present; if not,\n returns 'id_or_obj'.\n \"\"\"\n if isinstance(id_or_obj, six.string_types + (int,)):\n # It's an ID\n return id_or_obj\n try:\n return id_or_obj.id\n except AttributeError:\n return id_or_obj",
"def delete(self, obj, id):\n \"\"\" Function delete\n Delete an object by id\n\n @param obj: object name ('hosts', 'puppetclasses'...)\n @param id: the id of the object (name or id)\n @return RETURN: the server response\n \"\"\"\n self.url = '{}{}/{}'.format(self.base_url, obj, id)\n self.method = 'DELETE'\n self.resp = requests.delete(url=self.url,\n auth=self.auth,\n headers=self.headers, cert=self.ca_cert)\n return self.__process_resp__(obj)",
"def find_by_id(self, _id, **kwargs):\n \"\"\"\n Pass me anything that looks like an _id : str, ObjectId, {\"_id\": str}, {\"_id\": ObjectId}\n \"\"\"\n\n if type(_id) == dict and _id.get(\"_id\"):\n return self.find_one({\"_id\": ObjectId(_id[\"_id\"])}, **kwargs)\n\n return self.find_one({\"_id\": ObjectId(_id)}, **kwargs)",
"def _get_or_fetch_id(self, zobj, fetch_func):\n \"\"\" Returns the ID of a Zobject wether it's already known or not\n\n If zobj.id is not known (frequent if zobj is a selector), fetches first\n the object and then returns its ID.\n\n :type zobj: a zobject subclass\n :type fetch_func: the function to fetch the zobj from server if its id\n is undefined.\n :returns: the object id\n \"\"\"\n\n try:\n return zobj.id\n except AttributeError:\n try:\n return fetch_func(zobj).id\n except AttributeError:\n raise ValueError('Unqualified Resource')",
"def get_page_object_by_id(context, object_type, oid):\n \"\"\"\n **Arguments**\n\n ``object_type``\n object type\n\n ``oid``\n id for object selection\n\n :return selected object\n \"\"\"\n if type(oid) != int:\n raise template.TemplateSyntaxError('page_object_by_id tag requires a integer argument')\n\n selected_object = None\n\n try:\n try:\n for obj in context['page']['content'][object_type]:\n sid = '{0:>s}:{1:>s}:{2:>s}:{3:>d}'.format(\n obj.language, context['page']['page'].name, obj.type, oid\n )\n if obj.sid == sid:\n selected_object = obj\n break\n except TypeError:\n pass\n except KeyError:\n try:\n try:\n for obj in context['page']['ext_content'][object_type]:\n sid = '{0:>s}:{1:>s}:{2:>s}:{3:>d}'.format(\n obj.language, context['page']['page'].name, obj.type, oid\n )\n if obj.sid == sid:\n selected_object = obj\n break\n except TypeError:\n pass\n except KeyError:\n raise template.TemplateSyntaxError('wrong content type: {0:>s}'.format(object_type))\n return selected_object",
"def by_id(cls, _id, engine_or_session):\n \"\"\"\n Get one object by primary_key value.\n \"\"\"\n ses, auto_close = ensure_session(engine_or_session)\n obj = ses.query(cls).get(_id)\n if auto_close:\n ses.close()\n return obj",
"def get_one(self, object_id):\n \"\"\"\n Retrieve an object by its object_id\n\n :param object_id: the objects id.\n :return: the requested object\n :raises: :class: NoResultFound when the object could not be found\n \"\"\"\n return self.session.query(self.cls).filter_by(id=object_id).one()"
] |
[
0.7663769125938416,
0.7547003626823425,
0.7183605432510376,
0.7151867151260376,
0.708323061466217,
0.7081875205039978,
0.6977672576904297,
0.6951950788497925,
0.6887167096138,
0.68255215883255,
0.6808472871780396,
0.678314745426178
] |
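get_id_by_name() is a thin composition of list(): it builds an exact-name search expression and looks the name up in the {name: id} dict that list(..., only_id=True) returns. A standalone sketch of those two steps with made-up data; dict.get(name, False) would be the more idiomatic form of the final lookup:

name = 'web01.example.com'
search = 'name = "{}"'.format(name)        # the search string handed to list()
print(search)                              # name = "web01.example.com"

name_to_id = {'web01.example.com': 42}     # made-up result of list(..., only_id=True)
host_id = name_to_id[name] if name in name_to_id else False
print(host_id)                             # 42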
Function set
Set an object by id
@param obj: object name ('hosts', 'puppetclasses'...)
@param id: the id of the object (name or id)
@param action: specific action of an object ('power'...)
@param payload: the dict of the payload
@param async: should this request be async, if true use
return.result() to get the response
@return RETURN: the server response
|
def set(self, obj, id, payload, action='', async=False):
""" Function set
Set an object by id
@param obj: object name ('hosts', 'puppetclasses'...)
@param id: the id of the object (name or id)
@param action: specific action of an object ('power'...)
@param payload: the dict of the payload
@param async: should this request be async, if true use
return.result() to get the response
@return RETURN: the server response
"""
self.url = '{}{}/{}'.format(self.base_url, obj, id)
self.method = 'PUT'
if action:
self.url += '/{}'.format(action)
self.payload = json.dumps(payload)
if async:
session = FuturesSession()
return session.put(url=self.url, auth=self.auth,
headers=self.headers, data=self.payload,
cert=self.ca_cert)
else:
self.resp = requests.put(url=self.url, auth=self.auth,
headers=self.headers, data=self.payload,
cert=self.ca_cert)
if self.__process_resp__(obj):
return self.res
return False
|
[
"def create(self, obj, payload, async=False):\n \"\"\" Function create\n Create an new object\n\n @param obj: object name ('hosts', 'puppetclasses'...)\n @param payload: the dict of the payload\n @param async: should this request be async, if true use\n return.result() to get the response\n @return RETURN: the server response\n \"\"\"\n self.url = self.base_url + obj\n self.method = 'POST'\n self.payload = json.dumps(payload)\n if async:\n self.method = 'POST(Async)'\n session = FuturesSession()\n self.resp = session.post(url=self.url, auth=self.auth,\n headers=self.headers, data=self.payload,\n cert=self.ca_cert)\n return self.resp\n else:\n self.resp = requests.post(url=self.url, auth=self.auth,\n headers=self.headers,\n data=self.payload, cert=self.ca_cert)\n return self.__process_resp__(obj)",
"def get(self, obj, id, sub_object=None):\n \"\"\" Function get\n Get an object by id\n\n @param obj: object name ('hosts', 'puppetclasses'...)\n @param id: the id of the object (name or id)\n @return RETURN: the targeted object\n \"\"\"\n self.url = '{}{}/{}'.format(self.base_url, obj, id)\n self.method = 'GET'\n if sub_object:\n self.url += '/' + sub_object\n self.resp = requests.get(url=self.url, auth=self.auth,\n headers=self.headers, cert=self.ca_cert)\n if self.__process_resp__(obj):\n return self.res\n return False",
"function send(payload, promise) {\n\tif (typeof apostle.domainKey == 'undefined'){\n\t\tpromise.reject('invalid', [{error: 'No domain key defined. Please set a domain key with `apostle.domainKey = \"abc123\"`'}])\n\t\treturn;\n\t}\n\t(request || superagent)\n\t\t.post(apostle.deliveryEndpoint)\n\t\t.type('json')\n\t\t.send(payload)\n\t\t.set('Authorization', 'Bearer ' + apostle.domainKey)\n\t\t.set('Apostle-Client', 'JavaScript/v0.1.1')\n\t\t.end(function(err, res){\n\t\t\tif(res.ok){\n\t\t\t\tpromise.fulfill()\n\t\t\t}else{\n\t\t\t\tpromise.reject('error', res)\n\t\t\t}\n\t\t})\n}",
"def delete(self, obj, id):\n \"\"\" Function delete\n Delete an object by id\n\n @param obj: object name ('hosts', 'puppetclasses'...)\n @param id: the id of the object (name or id)\n @return RETURN: the server response\n \"\"\"\n self.url = '{}{}/{}'.format(self.base_url, obj, id)\n self.method = 'DELETE'\n self.resp = requests.delete(url=self.url,\n auth=self.auth,\n headers=self.headers, cert=self.ca_cert)\n return self.__process_resp__(obj)",
"function actionCreator(type, payload, meta) {\n var data = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : {};\n\n var action = _objectSpread({\n type: type,\n payload: payload\n }, data);\n\n if (payload instanceof Error) {\n action.error = true;\n }\n\n if (meta) {\n action.meta = meta;\n }\n\n return action;\n}",
"def emit(self, action, payload):\n \"\"\"Emit action with payload via `requests.post`.\"\"\"\n url = self.get_emit_api(action)\n headers = {\n 'User-Agent': 'rio/%s' % VERSION,\n 'X-Rio-Protocol': '1',\n }\n args = dict(\n url=url,\n json=payload,\n headers=headers,\n timeout=self.timeout,\n )\n resp = requests.post(**args)\n data = resp.json()\n is_success = resp.status_code == 200\n result = dict(\n is_success=is_success,\n message=data['message'],\n )\n if result['is_success']:\n result.update(\n event_uuid=data['event']['uuid'],\n task_id=data['task']['id'],\n )\n return result",
"def set_from_json(self, obj, json, models=None, setter=None):\n '''Sets the value of this property from a JSON value.\n\n Args:\n obj: (HasProps) : instance to set the property value on\n\n json: (JSON-value) : value to set to the attribute to\n\n models (dict or None, optional) :\n Mapping of model ids to models (default: None)\n\n This is needed in cases where the attributes to update also\n have values that have references.\n\n setter (ClientSession or ServerSession or None, optional) :\n This is used to prevent \"boomerang\" updates to Bokeh apps.\n (default: None)\n\n In the context of a Bokeh server application, incoming updates\n to properties will be annotated with the session that is\n doing the updating. This value is propagated through any\n subsequent change notifications that the update triggers.\n The session can compare the event setter to itself, and\n suppress any updates that originate from itself.\n\n Returns:\n None\n\n '''\n self._internal_set(obj, json, setter=setter)",
"async def set_async(self, type_name, entity):\n \"\"\"Sets an entity asynchronously using the API. Shortcut for using async_call() with the 'Set' method.\n\n :param type_name: The type of entity\n :param entity: The entity to set\n :raise MyGeotabException: Raises when an exception occurs on the MyGeotab server\n \"\"\"\n return await self.call_async('Set', type_name=type_name, entity=entity)",
"def init_object(self, args, kwargs):\n \"\"\"This method is reponsible for setting :attr:`obj`.\n\n It is called during :meth:`prepare_args`.\n \"\"\"\n self.object_id = kwargs.pop(self.pk, None)\n if self.object_id is not None:\n self.obj = self.Model.query.get(self.object_id)\n actions.context[\"object\"] = self.obj\n\n return args, kwargs",
"def _simple_action(self, action=None):\n '''Issue a request for an API method whose only param is the obj ID.\n\n :param str action: The name of the action for the resource\n :returns: Response from the API\n :rtype: dict\n '''\n if not action:\n raise Exception('No simple action defined')\n path = \"/\".join([self.RESOURCE, action])\n response = self.client.request(\n path, {self.RESOURCE_ID_ATTRIBUTE: self.object_id}\n )\n return response",
"def list(self, obj, filter=False, only_id=False, limit=20):\n \"\"\" Function list\n Get the list of an object\n\n @param obj: object name ('hosts', 'puppetclasses'...)\n @param filter: filter for objects\n @param only_id: boolean to only return dict with name/id\n @return RETURN: the list of the object\n \"\"\"\n self.url = '{}{}/?per_page={}'.format(self.base_url, obj, limit)\n self.method = 'GET'\n if filter:\n self.url += '&search={}'.format(filter)\n self.resp = requests.get(url=self.url, auth=self.auth,\n headers=self.headers, cert=self.ca_cert)\n if only_id:\n if self.__process_resp__(obj) is False:\n return False\n if type(self.res['results']) is list:\n return dict((x['name'], x['id']) for x in self.res['results'])\n elif type(self.res['results']) is dict:\n r = {}\n for v in self.res['results'].values():\n for vv in v:\n r[vv['name']] = vv['id']\n return r\n else:\n return False\n else:\n return self.__process_resp__(obj)",
"def perform(action, payload = {})\n payload.merge!(\n cmd: action,\n version: DeathByCaptcha::API_VERSION,\n username: self.username,\n password: self.password\n )\n\n socket = create_socket()\n socket.puts(payload.to_json)\n response = socket.read()\n socket.close()\n\n begin\n response = JSON.parse(response)\n rescue\n raise DeathByCaptcha::APIResponseError.new(\"invalid JSON: #{response}\")\n end\n\n if !(error = response['error'].to_s).empty?\n case error\n when 'not-logged-in', 'invalid-credentials', 'banned', 'insufficient-funds'\n raise DeathByCaptcha::APIForbidden\n when 'invalid-captcha'\n raise DeathByCaptcha::APIBadRequest\n when 'service-overload'\n raise DeathByCaptcha::APIServiceUnavailable\n else\n raise DeathByCaptcha::APIResponseError.new(error)\n end\n end\n\n response\n end"
] |
[
0.7629753947257996,
0.6833419799804688,
0.6769798994064331,
0.6621506810188293,
0.6462209820747375,
0.6453836560249329,
0.6416064500808716,
0.6394880414009094,
0.6394200921058655,
0.6385706663131714,
0.638204038143158,
0.6359037160873413
] |
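The asynchronous branch of set() relies on the requests-futures package: FuturesSession schedules the PUT on a thread pool and returns a Future whose .result() yields the usual requests Response. Note that the parameter is literally named async, which became a reserved word in Python 3.7, so this method and any call site passing async=True only parse on older interpreters. A self-contained sketch of the future pattern against a placeholder endpoint (httpbin stands in for a Foreman server; requests-futures must be installed):

import json
from requests_futures.sessions import FuturesSession

session = FuturesSession()
future = session.put('https://httpbin.org/put',                 # placeholder URL
                     data=json.dumps({'host': {'comment': 'managed'}}),
                     headers={'Content-Type': 'application/json'})
response = future.result()   # blocks; same Response object requests.put() would return
print(response.status_code)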
Function create
Create a new object
@param obj: object name ('hosts', 'puppetclasses'...)
@param payload: the dict of the payload
@param async: should this request be async, if true use
return.result() to get the response
@return RETURN: the server response
|
def create(self, obj, payload, async=False):
""" Function create
    Create a new object
@param obj: object name ('hosts', 'puppetclasses'...)
@param payload: the dict of the payload
@param async: should this request be async, if true use
return.result() to get the response
@return RETURN: the server response
"""
self.url = self.base_url + obj
self.method = 'POST'
self.payload = json.dumps(payload)
if async:
self.method = 'POST(Async)'
session = FuturesSession()
self.resp = session.post(url=self.url, auth=self.auth,
headers=self.headers, data=self.payload,
cert=self.ca_cert)
return self.resp
else:
self.resp = requests.post(url=self.url, auth=self.auth,
headers=self.headers,
data=self.payload, cert=self.ca_cert)
return self.__process_resp__(obj)
|
[
"def set(self, obj, id, payload, action='', async=False):\n \"\"\" Function set\n Set an object by id\n\n @param obj: object name ('hosts', 'puppetclasses'...)\n @param id: the id of the object (name or id)\n @param action: specific action of an object ('power'...)\n @param payload: the dict of the payload\n @param async: should this request be async, if true use\n return.result() to get the response\n @return RETURN: the server response\n \"\"\"\n self.url = '{}{}/{}'.format(self.base_url, obj, id)\n self.method = 'PUT'\n if action:\n self.url += '/{}'.format(action)\n self.payload = json.dumps(payload)\n if async:\n session = FuturesSession()\n return session.put(url=self.url, auth=self.auth,\n headers=self.headers, data=self.payload,\n cert=self.ca_cert)\n else:\n self.resp = requests.put(url=self.url, auth=self.auth,\n headers=self.headers, data=self.payload,\n cert=self.ca_cert)\n if self.__process_resp__(obj):\n return self.res\n return False",
"def create_object(self, obj_type, payload, return_fields=None):\n \"\"\"Create an Infoblox object of type 'obj_type'\n\n Args:\n obj_type (str): Infoblox object type,\n e.g. 'network', 'range', etc.\n payload (dict): Payload with data to send\n return_fields (list): List of fields to be returned\n Returns:\n The object reference of the newly create object\n Raises:\n InfobloxException\n \"\"\"\n self._validate_obj_type_or_die(obj_type)\n\n query_params = self._build_query_params(return_fields=return_fields)\n\n url = self._construct_url(obj_type, query_params)\n opts = self._get_request_options(data=payload)\n self._log_request('post', url, opts)\n if(self.session.cookies):\n # the first 'get' or 'post' action will generate a cookie\n # after that, we don't need to re-authenticate\n self.session.auth = None\n r = self.session.post(url, **opts)\n\n self._validate_authorized(r)\n\n if r.status_code != requests.codes.CREATED:\n response = utils.safe_json_load(r.content)\n already_assigned = 'is assigned to another network view'\n if response and already_assigned in response.get('text'):\n exception = ib_ex.InfobloxMemberAlreadyAssigned\n else:\n exception = ib_ex.InfobloxCannotCreateObject\n raise exception(\n response=response,\n obj_type=obj_type,\n content=r.content,\n args=payload,\n code=r.status_code)\n\n return self._parse_reply(r)",
"async def create(cls, host, *args, **kwargs):\n \"\"\"Asynchronously create a :class:`Proxy` object.\n\n :param str host: A passed host can be a domain or IP address.\n If the host is a domain, try to resolve it\n :param str \\*args:\n (optional) Positional arguments that :class:`Proxy` takes\n :param str \\*\\*kwargs:\n (optional) Keyword arguments that :class:`Proxy` takes\n\n :return: :class:`Proxy` object\n :rtype: proxybroker.Proxy\n\n :raises ResolveError: If could not resolve the host\n :raises ValueError: If the port > 65535\n \"\"\" # noqa: W605\n loop = kwargs.pop('loop', None)\n resolver = kwargs.pop('resolver', Resolver(loop=loop))\n try:\n _host = await resolver.resolve(host)\n self = cls(_host, *args, **kwargs)\n except (ResolveError, ValueError) as e:\n log.error('%s:%s: Error at creating: %s' % (host, args[0], e))\n raise\n return self",
"function send(payload, promise) {\n\tif (typeof apostle.domainKey == 'undefined'){\n\t\tpromise.reject('invalid', [{error: 'No domain key defined. Please set a domain key with `apostle.domainKey = \"abc123\"`'}])\n\t\treturn;\n\t}\n\t(request || superagent)\n\t\t.post(apostle.deliveryEndpoint)\n\t\t.type('json')\n\t\t.send(payload)\n\t\t.set('Authorization', 'Bearer ' + apostle.domainKey)\n\t\t.set('Apostle-Client', 'JavaScript/v0.1.1')\n\t\t.end(function(err, res){\n\t\t\tif(res.ok){\n\t\t\t\tpromise.fulfill()\n\t\t\t}else{\n\t\t\t\tpromise.reject('error', res)\n\t\t\t}\n\t\t})\n}",
"async def from_payload(cls, payload, endpoint, idgen, debug, force_protocol=None):\n \"\"\"Create Service object from a payload.\"\"\"\n service_name = payload[\"service\"]\n\n if \"protocols\" not in payload:\n raise SongpalException(\n \"Unable to find protocols from payload: %s\" % payload\n )\n\n protocols = payload[\"protocols\"]\n _LOGGER.debug(\"Available protocols for %s: %s\", service_name, protocols)\n if force_protocol and force_protocol.value in protocols:\n protocol = force_protocol\n elif \"websocket:jsonizer\" in protocols:\n protocol = ProtocolType.WebSocket\n elif \"xhrpost:jsonizer\" in protocols:\n protocol = ProtocolType.XHRPost\n else:\n raise SongpalException(\n \"No known protocols for %s, got: %s\" % (service_name, protocols)\n )\n _LOGGER.debug(\"Using protocol: %s\" % protocol)\n\n service_endpoint = \"%s/%s\" % (endpoint, service_name)\n\n # creation here we want to pass the created service class to methods.\n service = cls(service_name, service_endpoint, protocol, idgen, debug)\n\n sigs = await cls.fetch_signatures(\n service_endpoint, protocol, idgen\n )\n\n if debug > 1:\n _LOGGER.debug(\"Signatures: %s\", sigs)\n if \"error\" in sigs:\n _LOGGER.error(\"Got error when fetching sigs: %s\", sigs[\"error\"])\n return None\n\n methods = {}\n\n for sig in sigs[\"results\"]:\n name = sig[0]\n parsed_sig = MethodSignature.from_payload(*sig)\n if name in methods:\n _LOGGER.debug(\"Got duplicate signature for %s, existing was %s. Keeping the existing one\",\n parsed_sig, methods[name])\n else:\n methods[name] = Method(service, parsed_sig, debug)\n\n service.methods = methods\n\n if \"notifications\" in payload and \"switchNotifications\" in methods:\n notifications = [\n Notification(\n service_endpoint, methods[\"switchNotifications\"], notification\n )\n for notification in payload[\"notifications\"]\n ]\n service.notifications = notifications\n _LOGGER.debug(\"Got notifications: %s\" % notifications)\n\n return service",
"def create_room(self, payload):\n ''' create a stream in a non-inclusive manner '''\n response, status_code = self.__pod__.Streams.post_v2_room_create(\n # V2RoomAttributes\n payload=payload\n ).result()\n self.logger.debug('%s: %s' % (status_code, response))\n return status_code, response",
"def create_items(portal_type=None, uid=None, endpoint=None, **kw):\n \"\"\" create items\n\n 1. If the uid is given, get the object and create the content in there\n (assumed that it is folderish)\n 2. If the uid is 0, the target folder is assumed the portal.\n 3. If there is no uid given, the payload is checked for either a key\n - `parent_uid` specifies the *uid* of the target folder\n - `parent_path` specifies the *physical path* of the target folder\n \"\"\"\n\n # disable CSRF\n req.disable_csrf_protection()\n\n # destination where to create the content\n container = uid and get_object_by_uid(uid) or None\n\n # extract the data from the request\n records = req.get_request_data()\n\n results = []\n for record in records:\n\n # get the portal_type\n if portal_type is None:\n # try to fetch the portal type out of the request data\n portal_type = record.pop(\"portal_type\", None)\n\n # check if it is allowed to create the portal_type\n if not is_creation_allowed(portal_type):\n fail(401, \"Creation of '{}' is not allowed\".format(portal_type))\n\n if container is None:\n # find the container for content creation\n container = find_target_container(portal_type, record)\n\n # Check if we have a container and a portal_type\n if not all([container, portal_type]):\n fail(400, \"Please provide a container path/uid and portal_type\")\n\n # create the object and pass in the record data\n obj = create_object(container, portal_type, **record)\n results.append(obj)\n\n if not results:\n fail(400, \"No Objects could be created\")\n\n return make_items_for(results, endpoint=endpoint)",
"def create(cls, server, item, shuffle=0, repeat=0, includeChapters=1, includeRelated=1):\n \"\"\" Create and returns a new :class:`~plexapi.playqueue.PlayQueue`.\n\n Paramaters:\n server (:class:`~plexapi.server.PlexServer`): Server you are connected to.\n item (:class:`~plexapi.media.Media` or class:`~plexapi.playlist.Playlist`): A media or Playlist.\n shuffle (int, optional): Start the playqueue shuffled.\n repeat (int, optional): Start the playqueue shuffled.\n includeChapters (int, optional): include Chapters.\n includeRelated (int, optional): include Related.\n \"\"\"\n args = {}\n args['includeChapters'] = includeChapters\n args['includeRelated'] = includeRelated\n args['repeat'] = repeat\n args['shuffle'] = shuffle\n if item.type == 'playlist':\n args['playlistID'] = item.ratingKey\n args['type'] = item.playlistType\n else:\n uuid = item.section().uuid\n args['key'] = item.key\n args['type'] = item.listType\n args['uri'] = 'library://%s/item/%s' % (uuid, item.key)\n path = '/playQueues%s' % utils.joinArgs(args)\n data = server.query(path, method=server._session.post)\n c = cls(server, data, initpath=path)\n # we manually add a key so we can pass this to playMedia\n # since the data, does not contain a key.\n c.key = item.key\n return c",
"def create(cls, parent=None, **kwargs):\n \"\"\"Create an object and return it\"\"\"\n\n if parent is None:\n raise Exception(\"Parent class is required\")\n\n route = copy(parent.route)\n if cls.ID_NAME is not None:\n route[cls.ID_NAME] = \"\"\n\n obj = cls(key=parent.key, route=route, config=parent.config)\n\n start = datetime.now()\n response = requests.post(obj._url(), auth=(obj.key, \"\"), data=kwargs)\n cls._delay_for_ratelimits(start)\n\n if response.status_code not in cls.TRUTHY_CODES:\n return cls._handle_request_exception(response)\n\n # No envelope on post requests\n data = response.json()\n obj.route[obj.ID_NAME] = data.get(\"id\", data.get(obj.ID_NAME))\n obj.data = data\n\n return obj",
"def importPuppetClasses(self, smartProxyId):\n \"\"\" Function importPuppetClasses\n Force the reload of puppet classes\n\n @param smartProxyId: smartProxy Id\n @return RETURN: the API result\n \"\"\"\n return self.api.create('{}/{}/import_puppetclasses'\n .format(self.objName, smartProxyId), '{}')",
"def create_async(resource_group_name, network_watcher_name, packet_capture_name, parameters, custom_headers:nil)\n # Send request\n promise = begin_create_async(resource_group_name, network_watcher_name, packet_capture_name, parameters, custom_headers:custom_headers)\n\n promise = promise.then do |response|\n # Defining deserialization method.\n deserialize_method = lambda do |parsed_response|\n result_mapper = Azure::Network::Mgmt::V2018_07_01::Models::PacketCaptureResult.mapper()\n parsed_response = @client.deserialize(result_mapper, parsed_response)\n end\n\n # Waiting for response.\n @client.get_long_running_operation_result(response, deserialize_method)\n end\n\n promise\n end",
"def create_async(self, path, value=b\"\", acl=None, ephemeral=False, sequence=False, makepath=False):\n \"\"\" wraps the default create() and handles encoding (Py3k) \"\"\"\n value = to_bytes(value)\n return super(XClient, self).create_async(path, value, acl, ephemeral, sequence, makepath)"
] |
[
0.6929964423179626,
0.681441605091095,
0.6703740954399109,
0.6687217950820923,
0.6633094549179077,
0.6537376046180725,
0.65224289894104,
0.6456822156906128,
0.643081545829773,
0.6398965120315552,
0.6397796273231506,
0.6394233703613281
] |
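The synchronous branch of create() boils down to a single requests.post of the JSON-encoded payload with the client's auth, headers and certificate. A self-contained sketch of that call shape; httpbin echoes the payload back, and the nested layout ({'domain': {...}}) follows the usual Foreman API v2 convention, though the required fields vary per object type:

import json
import requests

payload = {'domain': {'name': 'example.com', 'fullname': 'Example domain'}}
resp = requests.post('https://httpbin.org/post',                # placeholder URL
                     auth=('admin', 'changeme'),                # stands in for self.auth
                     headers={'Accept': 'application/json',
                              'Content-Type': 'application/json'},
                     data=json.dumps(payload),
                     cert=None)                                 # self.ca_cert may be None
print(resp.status_code, resp.json()['json'])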
Function delete
Delete an object by id
@param obj: object name ('hosts', 'puppetclasses'...)
@param id: the id of the object (name or id)
@return RETURN: the server response
|
def delete(self, obj, id):
""" Function delete
Delete an object by id
@param obj: object name ('hosts', 'puppetclasses'...)
@param id: the id of the object (name or id)
@return RETURN: the server response
"""
self.url = '{}{}/{}'.format(self.base_url, obj, id)
self.method = 'DELETE'
self.resp = requests.delete(url=self.url,
auth=self.auth,
headers=self.headers, cert=self.ca_cert)
return self.__process_resp__(obj)
|
[
"def del_object(self, obj):\n \"\"\"Debug deletes obj of obj[_type] with id of obj['_id']\"\"\"\n if obj['_index'] is None or obj['_index'] == \"\":\n raise Exception(\"Invalid Object\")\n if obj['_id'] is None or obj['_id'] == \"\":\n raise Exception(\"Invalid Object\")\n if obj['_type'] is None or obj['_type'] == \"\":\n raise Exception(\"Invalid Object\")\n self.connect_es()\n self.es.delete(index=obj['_index'],\n id=obj['_id'],\n doc_type=obj['_type'])",
"function do_delete(self, ObjType, obj) {\n\treturn nr_fcall(\"nopg:do_delete\", function() {\n\t\tif(!(obj && obj.$id)) { throw new TypeError(\"opts.$id invalid: \" + util.inspect(obj) ); }\n\t\tvar query, params;\n\t\tquery = \"DELETE FROM \" + (ObjType.meta.table) + \" WHERE id = $1\";\n\t\tparams = [obj.$id];\n\t\treturn do_query(self, query, params);\n\t});\n}",
"def delete(self, object_id):\n \"\"\"\n Delete an object by its id\n\n :param object_id: the objects id.\n :return: the deleted object\n :raises: :class: NoResultFound when the object could not be found\n \"\"\"\n obj = self.session.query(self.cls).filter_by(id=object_id).one()\n self.session.delete(obj)\n return obj",
"public function admin_setup_delete( $id, $obj ) {\n\n\t\t$pod = pods_api()->load_pod( array( 'id' => $id ), false );\n\n\t\tif ( empty( $pod ) ) {\n\t\t\treturn $obj->error( __( 'Pod not found.', 'pods' ) );\n\t\t}\n\n\t\tpods_api()->delete_pod( array( 'id' => $id ) );\n\n\t\tunset( $obj->data[ $pod['id'] ] );\n\n\t\t$obj->total = count( $obj->data );\n\t\t$obj->total_found = count( $obj->data );\n\n\t\t$obj->message( __( 'Pod deleted successfully.', 'pods' ) );\n\t}",
"def delete(self, obj):\n \"\"\"Required functionality.\"\"\"\n del_id = obj.get_id()\n if not del_id:\n return\n\n coll = self.get_collection(obj.__class__.get_table_name())\n coll.delete_one({\"_id\": del_id})",
"def delete(self, obj):\n \"\"\"Required functionality.\"\"\"\n del_id = obj.get_id()\n if not del_id:\n return\n\n cur = self._conn().cursor()\n\n tabname = obj.__class__.get_table_name()\n query = 'delete from %s where id = ?' % tabname\n cur.execute(query, (del_id,))\n\n self._conn().commit()\n cur.close()",
"def get(self, obj, id, sub_object=None):\n \"\"\" Function get\n Get an object by id\n\n @param obj: object name ('hosts', 'puppetclasses'...)\n @param id: the id of the object (name or id)\n @return RETURN: the targeted object\n \"\"\"\n self.url = '{}{}/{}'.format(self.base_url, obj, id)\n self.method = 'GET'\n if sub_object:\n self.url += '/' + sub_object\n self.resp = requests.get(url=self.url, auth=self.auth,\n headers=self.headers, cert=self.ca_cert)\n if self.__process_resp__(obj):\n return self.res\n return False",
"def delete_object(self, id):\n \"\"\"Deletes the object with the given ID from the graph.\"\"\"\n return self.request(\n \"{0}/{1}\".format(self.version, id), method=\"DELETE\"\n )",
"def delete(self, obj):\n \"\"\"\n Delete an object in CDSTAR and remove it from the catalog.\n\n :param obj: An object ID or an Object instance.\n \"\"\"\n obj = self.api.get_object(getattr(obj, 'id', obj))\n obj.delete()\n self.remove(obj.id)",
"def delete(self, id):\n \"\"\"\n Delete a resource by bson id\n :raises: 404 Not Found\n :raises: 400 Bad request\n :raises: 500 Server Error\n \"\"\"\n try:\n response = yield self.client.delete(id)\n\n if response.get(\"n\") > 0:\n self.write({\"message\": \"Deleted %s object: %s\" % (self.object_name, id) })\n return\n\n self.raise_error(404, \"Resource not found\")\n\n except InvalidId as ex:\n self.raise_error(400, message=\"Your ID is malformed: %s\" % id)\n except:\n self.raise_error()\n\n self.finish()",
"def delete_object(self, obj, view_kwargs):\n \"\"\"Delete an object through sqlalchemy\n\n :param DeclarativeMeta item: an item from sqlalchemy\n :param dict view_kwargs: kwargs from the resource view\n \"\"\"\n if obj is None:\n url_field = getattr(self, 'url_field', 'id')\n filter_value = view_kwargs[url_field]\n raise ObjectNotFound('{}: {} not found'.format(self.model.__name__, filter_value),\n source={'parameter': url_field})\n\n self.before_delete_object(obj, view_kwargs)\n\n self.session.delete(obj)\n try:\n self.session.commit()\n except JsonApiException as e:\n self.session.rollback()\n raise e\n except Exception as e:\n self.session.rollback()\n raise JsonApiException(\"Delete object error: \" + str(e))\n\n self.after_delete_object(obj, view_kwargs)",
"public <P extends ParaObject> void delete(P obj) {\n\t\tif (obj == null || obj.getId() == null) {\n\t\t\treturn;\n\t\t}\n\t\tinvokeDelete(obj.getType().concat(\"/\").concat(obj.getId()), null);\n\t}"
] |
[
0.7532449960708618,
0.7522357702255249,
0.7273005247116089,
0.7185198664665222,
0.7165303826332092,
0.7154962420463562,
0.7148327231407166,
0.7069894671440125,
0.7058194875717163,
0.704591691493988,
0.7043777108192444,
0.7033277750015259
] |
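delete() is typically combined with get_id_by_name(): resolve the id first, then issue the DELETE. A hedged helper showing that composition; 'api' is any already-configured client exposing the methods above, and the helper itself is not part of the original class:

def delete_by_name(api, obj, name):
    """Resolve a Foreman object's id from its name, then delete it.

    Returns the processed server response, or False if the name is unknown.
    """
    obj_id = api.get_id_by_name(obj, name)
    if obj_id is False:
        return False
    return api.delete(obj, obj_id)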
Modified ``run`` that captures return value and exceptions from ``target``
|
def run(self):
"""Modified ``run`` that captures return value and exceptions from ``target``"""
try:
if self._target:
return_value = self._target(*self._args, **self._kwargs)
if return_value is not None:
self._exception = OrphanedReturn(self, return_value)
except BaseException as err:
self._exception = err
finally:
# Avoid a refcycle if the thread is running a function with
# an argument that has a member that points to the thread.
del self._target, self._args, self._kwargs
|
[
"def capture(self, *args, **kwargs):\n '''Run a task and return a dictionary with stderr, stdout and the\n return value. Also, the traceback from the exception if there was\n one'''\n import traceback\n try:\n from StringIO import StringIO\n except ImportError:\n from io import StringIO\n stdout, stderr = sys.stdout, sys.stderr\n sys.stdout = out = StringIO()\n sys.stderr = err = StringIO()\n result = {\n 'exception': None,\n 'stderr': None,\n 'stdout': None,\n 'return': None\n }\n try:\n result['return'] = self.__call__(*args, **kwargs)\n except Exception:\n result['exception'] = traceback.format_exc()\n sys.stdout, sys.stderr = stdout, stderr\n result['stderr'] = err.getvalue()\n result['stdout'] = out.getvalue()\n return result",
"def _target(queue, function, *args, **kwargs):\n \"\"\"Run a function with arguments and return output via a queue.\n\n This is a helper function for the Process created in _Timeout. It runs\n the function with positional arguments and keyword arguments and then\n returns the function's output by way of a queue. If an exception gets\n raised, it is returned to _Timeout to be raised by the value property.\n \"\"\"\n try:\n queue.put((True, function(*args, **kwargs)))\n except:\n queue.put((False, sys.exc_info()[1]))",
"def process_execute(function, *args, **kwargs):\n \"\"\"Runs the given function returning its results or exception.\"\"\"\n try:\n return function(*args, **kwargs)\n except Exception as error:\n error.traceback = format_exc()\n return RemoteException(error, error.traceback)",
"def execute(function, *args, **kwargs):\n \"\"\"Runs the given function returning its results or exception.\"\"\"\n try:\n return function(*args, **kwargs)\n except Exception as error:\n error.traceback = format_exc()\n return error",
"def run_trial_exceptions(self, *args, **kwargs):\n '''\n A wrapper for run_trial that catches exceptions and returns them.\n It is meant for async simulations\n '''\n try:\n return self.run_trial(*args, **kwargs)\n except Exception as ex:\n c = ex.__cause__\n c.message = ''.join(traceback.format_exception(type(c), c, c.__traceback__)[:])\n return c",
"public static <V, X extends Exception> V runInFiberChecked(SuspendableCallable<V> target, Class<X> exceptionType) throws X, InterruptedException {\n return FiberUtil.runInFiberChecked(target, exceptionType);\n }",
"def unsafe_execute(self, result=None):\n \"\"\" un-wrapped execution, can raise excepetion\n\n :return: Execution result\n :rtype: kser.result.Result\n \"\"\"\n if result:\n self.result += result\n\n self._prerun()\n return self._onsuccess(self._postrun(self._run()))",
"def wrapped_target(target, q_stdout, q_stderr, q_error, robust, name, *args, **kwargs): # pragma: no cover\n \"\"\"\n Wraps a target with queues replacing stdout and stderr\n \"\"\"\n import sys\n sys.stdout = IOQueue(q_stdout)\n sys.stderr = IOQueue(q_stderr)\n\n try:\n target(*args, **kwargs)\n except:\n if not robust:\n s = 'Error in tab\\n' + traceback.format_exc()\n logger = daiquiri.getLogger(name)\n logger.error(s)\n else:\n raise\n\n\n\n if not robust:\n q_error.put(name)\n raise",
"def run(self, conf, arg, err):\n \"\"\"\n WeldContext is currently hidden from the Python API. We create a new\n context per Weld run and give ownership of it to the resulting value.\n\n NOTE: This can leak the context if the result of the Weld run is an\n error.\n \"\"\"\n weld_context_new = weld.weld_context_new\n weld_context_new.argtypes = [c_weld_conf]\n weld_context_new.restype = c_weld_context\n ctx = weld_context_new(conf.conf)\n\n weld_module_run = weld.weld_module_run\n # module, context, arg, &err\n weld_module_run.argtypes = [\n c_weld_module, c_weld_context, c_weld_value, c_weld_err]\n weld_module_run.restype = c_weld_value\n ret = weld_module_run(self.module, ctx, arg.val, err.error)\n return WeldValue(ret, assign=True, _ctx=ctx)",
"def run_and_capture(*args, **kwargs)\n ret = _wrap_run((proc { |*a, **k| Host.run_and_capture(*a, **k) }), *args, **kwargs)\n @last_err = ret[:err]\n @last_out = ret[:out]\n ret\n end",
"def run_run(self, run, conf=None, run_conf=None, use_thread=False, catch_exception=True):\n '''Runs a run in another thread. Non-blocking.\n\n Parameters\n ----------\n run : class, object\n Run class or object.\n run_conf : str, dict, file\n Specific configuration for the run.\n use_thread : bool\n If True, run run in thread and returns blocking function.\n\n Returns\n -------\n If use_thread is True, returns function, which blocks until thread terminates, and which itself returns run status.\n If use_thread is False, returns run status.\n '''\n if isinstance(conf, basestring) and os.path.isfile(conf):\n logging.info('Updating configuration from file %s', os.path.abspath(conf))\n elif conf is not None:\n logging.info('Updating configuration')\n conf = self.open_conf(conf)\n self._conf.update(conf)\n\n if isclass(run):\n # instantiate the class\n run = run(conf=self._conf)\n\n local_run_conf = {}\n # general parameters from conf\n if 'run_conf' in self._conf:\n logging.info('Updating run configuration using run_conf key from configuration')\n local_run_conf.update(self._conf['run_conf'])\n # check for class name, scan specific parameters from conf\n if run.__class__.__name__ in self._conf:\n logging.info('Updating run configuration using %s key from configuration' % (run.__class__.__name__,))\n local_run_conf.update(self._conf[run.__class__.__name__])\n\n if isinstance(run_conf, basestring) and os.path.isfile(run_conf):\n logging.info('Updating run configuration from file %s', os.path.abspath(run_conf))\n elif run_conf is not None:\n logging.info('Updating run configuration')\n run_conf = self.open_conf(run_conf)\n # check for class name, scan specific parameters from conf\n if run.__class__.__name__ in run_conf:\n run_conf = run_conf[run.__class__.__name__]\n # run_conf parameter has highest priority, updated last\n local_run_conf.update(run_conf)\n\n if use_thread:\n self.current_run = run\n\n @thunkify(thread_name='RunThread', daemon=True, default_func=self.current_run.get_run_status)\n def run_run_in_thread():\n return run.run(run_conf=local_run_conf)\n\n signal.signal(signal.SIGINT, self._signal_handler)\n logging.info('Press Ctrl-C to stop run')\n\n return run_run_in_thread()\n else:\n self.current_run = run\n status = run.run(run_conf=local_run_conf)\n if not catch_exception and status != run_status.finished:\n raise RuntimeError('Exception occurred. Please read the log.')\n return status",
"public static <X extends Exception> void runInFiberChecked(SuspendableRunnable target, Class<X> exceptionType) throws X, InterruptedException {\n FiberUtil.runInFiberChecked(target, exceptionType);\n }"
] |
[
0.7184444069862366,
0.7156774401664734,
0.7035624384880066,
0.6966459155082703,
0.6876786351203918,
0.6849420070648193,
0.6831562519073486,
0.6829209327697754,
0.6812566518783569,
0.6808006167411804,
0.6792249083518982,
0.67826247215271
] |
Start all queued payloads
|
def _start_payloads(self):
"""Start all queued payloads"""
with self._lock:
payloads = self._payloads.copy()
self._payloads.clear()
for subroutine in payloads:
thread = CapturingThread(target=subroutine)
thread.start()
self._threads.add(thread)
self._logger.debug('booted thread %s', thread)
time.sleep(0)
|
[
"async def _start_payloads(self):\n \"\"\"Start all queued payloads\"\"\"\n with self._lock:\n for coroutine in self._payloads:\n task = self.event_loop.create_task(coroutine())\n self._tasks.add(task)\n self._payloads.clear()\n await asyncio.sleep(0)",
"async def _start_payloads(self, nursery):\n \"\"\"Start all queued payloads\"\"\"\n with self._lock:\n for coroutine in self._payloads:\n nursery.start_soon(coroutine)\n self._payloads.clear()\n await trio.sleep(0)",
"async def _await_all(self):\n \"\"\"Async component of _run\"\"\"\n delay = 0.0\n # we run a top-level nursery that automatically reaps/cancels for us\n async with trio.open_nursery() as nursery:\n while self.running.is_set():\n await self._start_payloads(nursery=nursery)\n await trio.sleep(delay)\n delay = min(delay + 0.1, 1.0)\n # cancel the scope to cancel all payloads\n nursery.cancel_scope.cancel()",
"function start(payload) {\n for (var id in this.callbacks) {\n this.isPending[id] = false;\n this.isHandled[id] = false;\n }\n this.isDispatching = true;\n this.pendingPayload = payload;\n}",
"async def _run_payloads(self):\n \"\"\"Async component of _run\"\"\"\n delay = 0.0\n try:\n while self.running.is_set():\n await self._start_payloads()\n await self._reap_payloads()\n await asyncio.sleep(delay)\n delay = min(delay + 0.1, 1.0)\n except Exception:\n await self._cancel_payloads()\n raise",
"def start(self, payload):\n \"\"\"Start the daemon and all processes or only specific processes.\"\"\"\n # Start specific processes, if `keys` is given in the payload\n if payload.get('keys'):\n succeeded = []\n failed = []\n for key in payload.get('keys'):\n success = self.process_handler.start_process(key)\n if success:\n succeeded.append(str(key))\n else:\n failed.append(str(key))\n\n message = ''\n if len(succeeded) > 0:\n message += 'Started processes: {}.'.format(', '.join(succeeded))\n status = 'success'\n if len(failed) > 0:\n message += '\\nNo paused, queued or stashed process for keys: {}'.format(', '.join(failed))\n status = 'error'\n\n answer = {'message': message.strip(), 'status': status}\n\n # Start a all processes and the daemon\n else:\n self.process_handler.start_all()\n if self.paused:\n self.paused = False\n answer = {'message': 'Daemon and all processes started.',\n 'status': 'success'}\n else:\n answer = {'message': 'Daemon already running, starting all processes.',\n 'status': 'success'}\n return answer",
"def register_payload(self, *payloads, flavour: ModuleType):\n \"\"\"Queue one or more payload for execution after its runner is started\"\"\"\n for payload in payloads:\n self._logger.debug('registering payload %s (%s)', NameRepr(payload), NameRepr(flavour))\n self.runners[flavour].register_payload(payload)",
"def payload_register(ptype, klass, pid):\n \"\"\" is used while a hook is running to let Juju know that a\n payload has been started.\"\"\"\n cmd = ['payload-register']\n for x in [ptype, klass, pid]:\n cmd.append(x)\n subprocess.check_call(cmd)",
"def start(queue, profile=None, tag='salt/engine/sqs', owner_acct_id=None):\n '''\n Listen to sqs and fire message on event bus\n '''\n if __opts__.get('__role') == 'master':\n fire_master = salt.utils.event.get_master_event(\n __opts__,\n __opts__['sock_dir'],\n listen=False).fire_event\n else:\n fire_master = __salt__['event.send']\n\n message_format = __opts__.get('sqs.message_format', None)\n\n sqs = _get_sqs_conn(profile)\n q = None\n while True:\n if not q:\n q = sqs.get_queue(queue, owner_acct_id=owner_acct_id)\n q.set_message_class(boto.sqs.message.RawMessage)\n\n _process_queue(q, queue, fire_master, tag=tag, owner_acct_id=owner_acct_id, message_format=message_format)",
"def run_payload(self, payload, *, flavour: ModuleType):\n \"\"\"Execute one payload after its runner is started and return its output\"\"\"\n return self.runners[flavour].run_payload(payload)",
"def run(self):\n \"\"\"\n Execute all current and future payloads\n\n Blocks and executes payloads until :py:meth:`stop` is called.\n It is an error for any orphaned payload to return or raise.\n \"\"\"\n self._logger.info('runner started: %s', self)\n try:\n with self._lock:\n assert not self.running.is_set() and self._stopped.is_set(), 'cannot re-run: %s' % self\n self.running.set()\n self._stopped.clear()\n self._run()\n except Exception:\n self._logger.exception('runner aborted: %s', self)\n raise\n else:\n self._logger.info('runner stopped: %s', self)\n finally:\n with self._lock:\n self.running.clear()\n self._stopped.set()",
"def _reap_payloads(self):\n \"\"\"Clean up all finished payloads\"\"\"\n for thread in self._threads.copy():\n # CapturingThread.join will throw\n if thread.join(timeout=0):\n self._threads.remove(thread)\n self._logger.debug('reaped thread %s', thread)"
] |
[
0.894452691078186,
0.88545161485672,
0.7891626954078674,
0.7770432233810425,
0.7561456561088562,
0.7468627095222473,
0.7409204840660095,
0.7305784225463867,
0.7299909591674805,
0.7234981060028076,
0.7174237966537476,
0.70259028673172
] |
Clean up all finished payloads
|
def _reap_payloads(self):
"""Clean up all finished payloads"""
for thread in self._threads.copy():
# CapturingThread.join will throw
if thread.join(timeout=0):
self._threads.remove(thread)
self._logger.debug('reaped thread %s', thread)
|
[
"async def _reap_payloads(self):\n \"\"\"Clean up all finished payloads\"\"\"\n for task in self._tasks.copy():\n if task.done():\n self._tasks.remove(task)\n if task.exception() is not None:\n raise task.exception()\n await asyncio.sleep(0)",
"async def _cancel_payloads(self):\n \"\"\"Cancel all remaining payloads\"\"\"\n for task in self._tasks:\n task.cancel()\n await asyncio.sleep(0)\n for task in self._tasks:\n while not task.done():\n await asyncio.sleep(0.1)\n task.cancel()",
"def clear(self, payload):\n \"\"\"Clear queue from any `done` or `failed` entries.\n\n The log will be rotated once. Otherwise we would loose all logs from\n thoes finished processes.\n \"\"\"\n self.logger.rotate(self.queue)\n self.queue.clear()\n self.logger.write(self.queue)\n\n answer = {'message': 'Finished entries have been removed.', 'status': 'success'}\n return answer",
"def reset_everything(self, payload):\n \"\"\"Kill all processes, delete the queue and clean everything up.\"\"\"\n kill_signal = signals['9']\n self.process_handler.kill_all(kill_signal, True)\n self.process_handler.wait_for_finish()\n self.reset = True\n\n answer = {'message': 'Resetting current queue', 'status': 'success'}\n return answer",
"def cleanup_payload(self, payload):\n \"\"\"\n Basically, turns payload that looks like ' \\\\n ' to ''. In the \n calling function, if this function returns '' no object is added \n for that payload.\n \"\"\"\n p = payload.replace('\\n', '')\n p = p.rstrip()\n p = p.lstrip()\n return p",
"async def _start_payloads(self, nursery):\n \"\"\"Start all queued payloads\"\"\"\n with self._lock:\n for coroutine in self._payloads:\n nursery.start_soon(coroutine)\n self._payloads.clear()\n await trio.sleep(0)",
"async def _await_all(self):\n \"\"\"Async component of _run\"\"\"\n delay = 0.0\n # we run a top-level nursery that automatically reaps/cancels for us\n async with trio.open_nursery() as nursery:\n while self.running.is_set():\n await self._start_payloads(nursery=nursery)\n await trio.sleep(delay)\n delay = min(delay + 0.1, 1.0)\n # cancel the scope to cancel all payloads\n nursery.cancel_scope.cancel()",
"protected void cleanupFromFinish() {\n _response = null;\n _bufferSize = 0;\n _encoding = null;\n // LIBERTY _responseBuffer = null;\n // _outWriterEncoding = null;\n _gotOutputStream = false;\n _gotWriter = false;\n this._pwriter = null;\n }",
"private function cleanUp()\n {\n if ($this->quiet) {\n $errors = explode(PHP_EOL, ob_get_clean());\n $shown = [];\n\n foreach ($errors as $error) {\n if ($error && !in_array($error, $shown)) {\n out($error, 'error');\n $shown[] = $error;\n }\n }\n }\n\n if (file_exists($this->tmpPharPath)) {\n @unlink($this->tmpPharPath);\n }\n if (file_exists($this->tmpPubkeyPath)) {\n @unlink($this->tmpPharPath);\n }\n }",
"def _cleanup(self) -> None:\n \"\"\"Cleanup unused transports.\"\"\"\n if self._cleanup_handle:\n self._cleanup_handle.cancel()\n\n now = self._loop.time()\n timeout = self._keepalive_timeout\n\n if self._conns:\n connections = {}\n deadline = now - timeout\n for key, conns in self._conns.items():\n alive = []\n for proto, use_time in conns:\n if proto.is_connected():\n if use_time - deadline < 0:\n transport = proto.transport\n proto.close()\n if (key.is_ssl and\n not self._cleanup_closed_disabled):\n self._cleanup_closed_transports.append(\n transport)\n else:\n alive.append((proto, use_time))\n\n if alive:\n connections[key] = alive\n\n self._conns = connections\n\n if self._conns:\n self._cleanup_handle = helpers.weakref_handle(\n self, '_cleanup', timeout, self._loop)",
"public void done() {\n\t\tfor(ByteBuffer bb : bbs) {\n\t\t\tringPut(bb);\n\t\t}\n\t\tbbs.clear();\n\t\tcurr = null;\n\t}",
"function (aRequests) {\n\t\t\taRequests.forEach(function (oRequest) {\n\t\t\t\tif (Array.isArray(oRequest)) {\n\t\t\t\t\t_Requestor.cleanBatch(oRequest);\n\t\t\t\t} else {\n\t\t\t\t\toRequest.body = _Requestor.cleanPayload(oRequest.body);\n\t\t\t\t}\n\t\t\t});\n\t\t\treturn aRequests;\n\t\t}"
] |
[
0.792407214641571,
0.7866622805595398,
0.7357620596885681,
0.7221269011497498,
0.7220130562782288,
0.7094706892967224,
0.7041050791740417,
0.7006880640983582,
0.6978424191474915,
0.6969825029373169,
0.6950753331184387,
0.6941037774085999
] |
Decorate functions that modify the internally stored usernotes JSON.
Ensures that updates are mirrored onto reddit.
Arguments:
func: the function being decorated
|
def update_cache(func):
"""Decorate functions that modify the internally stored usernotes JSON.
Ensures that updates are mirrored onto reddit.
Arguments:
func: the function being decorated
"""
@wraps(func)
def wrapper(self, *args, **kwargs):
"""The wrapper function."""
lazy = kwargs.get('lazy', False)
kwargs.pop('lazy', None)
if not lazy:
self.get_json()
ret = func(self, *args, **kwargs)
# If returning a string assume it is an update message
if isinstance(ret, str) and not lazy:
self.set_json(ret)
else:
return ret
return wrapper
|
[
"def alias_function(function, class_name):\n \"\"\"Create a RedditContentObject function mapped to a BaseReddit function.\n\n The BaseReddit classes define the majority of the API's functions. The\n first argument for many of these functions is the RedditContentObject that\n they operate on. This factory returns functions appropriate to be called on\n a RedditContent object that maps to the corresponding BaseReddit function.\n\n \"\"\"\n @wraps(function)\n def wrapped(self, *args, **kwargs):\n func_args = _make_func_args(function)\n if 'subreddit' in func_args and func_args.index('subreddit') != 1:\n # Only happens for search\n kwargs['subreddit'] = self\n return function(self.reddit_session, *args, **kwargs)\n else:\n return function(self.reddit_session, self, *args, **kwargs)\n # Only grab the short-line doc and add a link to the complete doc\n if wrapped.__doc__ is not None:\n wrapped.__doc__ = wrapped.__doc__.split('\\n', 1)[0]\n wrapped.__doc__ += ('\\n\\nSee :meth:`.{0}.{1}` for complete usage. '\n 'Note that you should exclude the subreddit '\n 'parameter when calling this convenience method.'\n .format(class_name, function.__name__))\n # Don't hide from sphinx as this is a parameter modifying decorator\n return wrapped",
"def updateBeforeDecorator(function):\n \"\"\" Function updateAfterDecorator\n Decorator to ensure local dict is sync with remote foreman\n \"\"\"\n def _updateBeforeDecorator(self, *args, **kwargs):\n if self.forceFullSync:\n self.reload()\n return function(self, *args, **kwargs)\n return _updateBeforeDecorator",
"def updateAfterDecorator(function):\n \"\"\" Function updateAfterDecorator\n Decorator to ensure local dict is sync with remote foreman\n \"\"\"\n def _updateAfterDecorator(self, *args, **kwargs):\n ret = function(self, *args, **kwargs)\n self.reload()\n return ret\n return _updateAfterDecorator",
"def _init_notes(self):\n \"\"\"Set up the UserNotes page with the initial JSON schema.\"\"\"\n self.cached_json = {\n 'ver': self.schema,\n 'users': {},\n 'constants': {\n 'users': [x.name for x in self.subreddit.moderator()],\n 'warnings': Note.warnings\n }\n }\n\n self.set_json('Initializing JSON via puni', True)",
"def _updateRepo(self, func, *args, **kwargs):\n \"\"\"\n Runs the specified function that updates the repo with the specified\n arguments. This method ensures that all updates are transactional,\n so that if any part of the update fails no changes are made to the\n repo.\n \"\"\"\n # TODO how do we make this properly transactional?\n self._repo.open(datarepo.MODE_WRITE)\n try:\n func(*args, **kwargs)\n self._repo.commit()\n finally:\n self._repo.close()",
"def login_required(func):\n '''\n If you decorate a view with this, it will ensure that the current user is\n logged in and authenticated before calling the actual view. (If they are\n not, it calls the :attr:`LoginManager.unauthorized` callback.) For\n example::\n\n @app.route('/post')\n @login_required\n def post():\n pass\n\n If there are only certain times you need to require that your user is\n logged in, you can do so with::\n\n if not current_user.is_authenticated:\n return current_app.login_manager.unauthorized()\n\n ...which is essentially the code that this function adds to your views.\n\n It can be convenient to globally turn off authentication when unit testing.\n To enable this, if the application configuration variable `LOGIN_DISABLED`\n is set to `True`, this decorator will be ignored.\n\n .. Note ::\n\n Per `W3 guidelines for CORS preflight requests\n <http://www.w3.org/TR/cors/#cross-origin-request-with-preflight-0>`_,\n HTTP ``OPTIONS`` requests are exempt from login checks.\n\n :param func: The view function to decorate.\n :type func: function\n '''\n @wraps(func)\n def decorated_view(*args, **kwargs):\n if request.method in EXEMPT_METHODS:\n return func(*args, **kwargs)\n elif current_app.config.get('LOGIN_DISABLED'):\n return func(*args, **kwargs)\n elif not current_user.is_authenticated:\n return current_app.login_manager.unauthorized()\n return func(*args, **kwargs)\n return decorated_view",
"def _sync_notes(self, notes_json):\n \"\"\"\"Populate the user's notes from a JSON encoded list.\"\"\"\n for note_json in notes_json:\n note_id = note_json['id']\n task_id = note_json['item_id']\n if task_id not in self.tasks:\n # ignore orphan notes\n continue\n task = self.tasks[task_id]\n self.notes[note_id] = Note(note_json, task)",
"def add_current_user_is_applied_representation(func):\n \"\"\" Used to decorate Serializer.to_representation method.\n It sets the field \"current_user_is_applied\" if the user is applied to the project\n \"\"\"\n @wraps(func)\n def _impl(self, instance):\n # We pop current_user_is_applied field to avoid AttributeError on default Serializer.to_representation\n ret = func(self, instance)\n\n user = self.context[\"request\"].user\n applied = False\n if not user.is_anonymous():\n try:\n applied = models.Apply.objects.filter(user=user, project=instance).count() > 0\n except:\n pass\n\n ret[\"current_user_is_applied\"] = applied\n\n return ret\n return _impl",
"def notify_owner(func):\n ''' A decorator for mutating methods of property container classes\n that notifies owners of the property container about mutating changes.\n\n Args:\n func (callable) : the container method to wrap in a notification\n\n Returns:\n wrapped method\n\n Examples:\n\n A ``__setitem__`` could be wrapped like this:\n\n .. code-block:: python\n\n # x[i] = y\n @notify_owner\n def __setitem__(self, i, y):\n return super(PropertyValueDict, self).__setitem__(i, y)\n\n The returned wrapped method will have a docstring indicating what\n original method it is wrapping.\n\n '''\n def wrapper(self, *args, **kwargs):\n old = self._saved_copy()\n result = func(self, *args, **kwargs)\n self._notify_owners(old)\n return result\n wrapper.__doc__ = \"Container method ``%s`` instrumented to notify property owners\" % func.__name__\n return wrapper",
"def set_json(self, reason='', new_page=False):\n \"\"\"Send the JSON from the cache to the usernotes wiki page.\n\n Arguments:\n reason: the change reason that will be posted to the wiki changelog\n (str)\n Raises:\n OverflowError if the new JSON data is greater than max_page_size\n \"\"\"\n compressed_json = json.dumps(self._compress_json(self.cached_json))\n\n if len(compressed_json) > self.max_page_size:\n raise OverflowError(\n 'Usernotes page is too large (>{0} characters)'.\n format(self.max_page_size)\n )\n\n if new_page:\n self.subreddit.wiki.create(\n self.page_name,\n compressed_json,\n reason\n )\n # Set the page as hidden and available to moderators only\n self.subreddit.wiki[self.page_name].mod.update(False, permlevel=2)\n else:\n self.subreddit.wiki[self.page_name].edit(\n compressed_json,\n reason\n )",
"def logged_in(f):\n \"\"\"\n Decorator for Page methods that require the user to be authenticated.\n \"\"\"\n\n @wraps(f)\n def wrapped_method(self, *args, **kwargs):\n if not self.reddit.is_oauth_session():\n self.term.show_notification('Not logged in')\n return None\n return f(self, *args, **kwargs)\n return wrapped_method",
"def observed(cls, _func):\n \"\"\"\n Decorate methods to be observable. If they are called on an instance\n stored in a property, the model will emit before and after\n notifications.\n \"\"\"\n\n def wrapper(*args, **kwargs):\n self = args[0]\n assert(isinstance(self, Observable))\n\n self._notify_method_before(self, _func.__name__, args, kwargs)\n res = _func(*args, **kwargs)\n self._notify_method_after(self, _func.__name__, res, args, kwargs)\n return res\n\n return wrapper"
] |
[
0.7108466625213623,
0.699897289276123,
0.6968865990638733,
0.6755101680755615,
0.6744885444641113,
0.6734359264373779,
0.6732186079025269,
0.6724695563316345,
0.6722174286842346,
0.6711249351501465,
0.6701214909553528,
0.6627087593078613
] |
Create an unbound prototype of this class, partially applying arguments
.. code:: python
controller = Controller.s(interval=20)
pipeline = controller(rate=10) >> pool
|
def s(cls: Type[C], *args, **kwargs) -> Partial[C]:
"""
Create an unbound prototype of this class, partially applying arguments
.. code:: python
controller = Controller.s(interval=20)
pipeline = controller(rate=10) >> pool
"""
return Partial(cls, *args, **kwargs)
|
[
"def s(self, *args, **kwargs) -> Partial[Stepwise]:\n \"\"\"\n Create an unbound prototype of this class, partially applying arguments\n\n .. code:: python\n\n @stepwise\n def control(pool: Pool, interval):\n return 10\n\n pipeline = control.s(interval=20) >> pool\n\n :note: The partial rules are sealed, and :py:meth:`~.UnboundStepwise.add`\n cannot be called on it.\n \"\"\"\n return Partial(Stepwise, self.base, *self.rules, *args, **kwargs)",
"async def pipeline(self, transaction=None, shard_hint=None, watches=None):\n \"\"\"\n Cluster impl:\n Pipelines do not work in cluster mode the same way they do in normal mode.\n Create a clone of this object so that simulating pipelines will work correctly.\n Each command will be called directly when used and when calling execute() will only return the result stack.\n cluster transaction can only be run with commands in the same node, otherwise error will be raised.\n \"\"\"\n await self.connection_pool.initialize()\n if shard_hint:\n raise RedisClusterException(\"shard_hint is deprecated in cluster mode\")\n\n from aredis.pipeline import StrictClusterPipeline\n return StrictClusterPipeline(\n connection_pool=self.connection_pool,\n startup_nodes=self.connection_pool.nodes.startup_nodes,\n result_callbacks=self.result_callbacks,\n response_callbacks=self.response_callbacks,\n transaction=transaction,\n watches=watches\n )",
"def from_id(cls, pipeline_id, resolve_outputs=True, _pipeline_record=None):\n \"\"\"Returns an instance corresponding to an existing Pipeline.\n\n The returned object will have the same properties a Pipeline does while\n it's running synchronously (e.g., like what it's first allocated), allowing\n callers to inspect caller arguments, outputs, fill slots, complete the\n pipeline, abort, retry, etc.\n\n Args:\n pipeline_id: The ID of this pipeline (a string).\n resolve_outputs: When True, dereference the outputs of this Pipeline\n so their values can be accessed by the caller.\n _pipeline_record: Internal-only. The _PipelineRecord instance to use\n to instantiate this instance instead of fetching it from\n the datastore.\n\n Returns:\n Pipeline sub-class instances or None if it could not be found.\n \"\"\"\n pipeline_record = _pipeline_record\n\n # Support pipeline IDs and idempotence_keys that are not unicode.\n if not isinstance(pipeline_id, unicode):\n try:\n pipeline_id = pipeline_id.encode('utf-8')\n except UnicodeDecodeError:\n pipeline_id = hashlib.sha1(pipeline_id).hexdigest()\n\n pipeline_key = db.Key.from_path(_PipelineRecord.kind(), pipeline_id)\n\n if pipeline_record is None:\n pipeline_record = db.get(pipeline_key)\n if pipeline_record is None:\n return None\n\n try:\n pipeline_func_class = mr_util.for_name(pipeline_record.class_path)\n except ImportError, e:\n logging.warning('Tried to find Pipeline %s#%s, but class could '\n 'not be found. Using default Pipeline class instead.',\n pipeline_record.class_path, pipeline_id)\n pipeline_func_class = cls\n\n params = pipeline_record.params\n arg_list, kwarg_dict = _dereference_args(\n pipeline_record.class_path, params['args'], params['kwargs'])\n outputs = PipelineFuture(pipeline_func_class.output_names)\n outputs._inherit_outputs(\n pipeline_record.class_path,\n params['output_slots'],\n resolve_outputs=resolve_outputs)\n\n stage = pipeline_func_class(*arg_list, **kwarg_dict)\n stage.backoff_seconds = params['backoff_seconds']\n stage.backoff_factor = params['backoff_factor']\n stage.max_attempts = params['max_attempts']\n stage.task_retry = params['task_retry']\n stage.target = params.get('target') # May not be defined for old Pipelines\n stage._current_attempt = pipeline_record.current_attempt\n stage._set_values_internal(\n _PipelineContext('', params['queue_name'], params['base_path']),\n pipeline_key,\n _PipelineRecord.root_pipeline.get_value_for_datastore(pipeline_record),\n outputs,\n pipeline_record.status)\n return stage",
"def interval(host, time, actor, method, *args, **kwargs):\n '''Creates an Event attached to the host for management that will\n execute the *method* of the *actor* every *time* seconds.\n\n See example in :ref:`sample_inter`\n\n :param Proxy host: host that will manage the interval, commonly the\n host of the actor.\n :param int time: seconds for the intervals.\n :param Proxy actor: actor to which make the call every *time* seconds.\n :param Str. method: method of the *actor* to be called.\n :param list args: arguments for *method*.\n :return: :class:`Event` instance of the interval.\n '''\n call = getattr(actor, method, None)\n if not callable(call):\n raise IntervalError(\"The actor %s does not have the method %s.\"\n % (actor.get_id(), method))\n if call.__class__.__name__ in [\"TellWrapper\", \"TellRefWrapper\"]:\n # If the method is a normal tell, the interval thread can send\n # the calls normally.\n # It it is a Ref Tell, the proxies in the args would be parsed\n # during the call to this very method. So the call can be made\n # as a normall Tell. The actor will do the loads normally on the\n # receive as it has its methods marked as ref.\n if call.__class__.__name__ is \"TellRefWrapper\":\n call.__call__ = TellWrapper.__call__\n\n return intervals.interval_host(host, time, call, *args, **kwargs)\n else:\n raise IntervalError(\"The callable for the interval must be a tell \\\n method of the actor.\")",
"async def build_hardware_controller(\n cls, config: robot_configs.robot_config = None,\n port: str = None,\n loop: asyncio.AbstractEventLoop = None,\n force: bool = False) -> 'API':\n \"\"\" Build a hardware controller that will actually talk to hardware.\n\n This method should not be used outside of a real robot, and on a\n real robot only one true hardware controller may be active at one\n time.\n\n :param config: A config to preload. If not specified, load the default.\n :param port: A port to connect to. If not specified, the default port\n (found by scanning for connected FT232Rs).\n :param loop: An event loop to use. If not specified, use the result of\n :py:meth:`asyncio.get_event_loop`.\n :param force: If `True`, connect even if a lockfile is present. See\n :py:meth:`Controller.__init__`.\n \"\"\"\n if None is Controller:\n raise RuntimeError(\n 'The hardware controller may only be instantiated on a robot')\n checked_loop = loop or asyncio.get_event_loop()\n backend = Controller(config, checked_loop, force=force)\n await backend.connect(port)\n return cls(backend, config=config, loop=checked_loop)",
"def pipeline(self):\n \"\"\"Returns :class:`Pipeline` object to execute bulk of commands.\n\n It is provided for convenience.\n Commands can be pipelined without it.\n\n Example:\n\n >>> pipe = redis.pipeline()\n >>> fut1 = pipe.incr('foo') # NO `await` as it will block forever!\n >>> fut2 = pipe.incr('bar')\n >>> result = await pipe.execute()\n >>> result\n [1, 1]\n >>> await asyncio.gather(fut1, fut2)\n [1, 1]\n >>> #\n >>> # The same can be done without pipeline:\n >>> #\n >>> fut1 = redis.incr('foo') # the 'INCRY foo' command already sent\n >>> fut2 = redis.incr('bar')\n >>> await asyncio.gather(fut1, fut2)\n [2, 2]\n \"\"\"\n return Pipeline(self._pool_or_conn, self.__class__,\n loop=self._pool_or_conn._loop)",
"def pipeline(self, transaction=True, shard_hint=None):\n \"\"\"\n Return a new pipeline object that can queue multiple commands for\n later execution. ``transaction`` indicates whether all commands\n should be executed atomically. Apart from making a group of operations\n atomic, pipelines are useful for reducing the back-and-forth overhead\n between the client and server.\n\n Overridden in order to provide the right client through the pipeline.\n \"\"\"\n p = Pipeline(\n connection_pool=self.connection_pool,\n response_callbacks=self.response_callbacks,\n transaction=transaction,\n shard_hint=shard_hint)\n p.setEncoder(self._encoder)\n p.setDecoder(self._decoder)\n return p",
"def pipeline_control_new(rst, clk, rx_rdy, rx_vld, tx_rdy, tx_vld, stage_enable, stop_rx=None, stop_tx=None):\n \"\"\" Pipeline control unit\n rx_rdy, rx_vld, - (o)(i) handshake at the pipeline input (front of the pipeline)\n tx_rdy, tx_vld, - (i)(o) handshake at the pipeline output (back of the pipeline)\n stage_enable - (o) vector of enable signals, one signal per stage, that controls the data registration in the stages;\n The length of this vector determines the number of stages in the pipeline\n stop_rx - (i) optional, vector of signals, one signal per stage; when asserted, the corresponding stage stops consuming data;\n allows for multicycle execution in a stage (e.g. consume a data, then process it multiple cycles)\n stop_tx - (i) optional, vector of signals, one signal per stage; when asserted, the corresponding stage stops producing data;\n allows for multicycle execution in a stage (consume multiple data to produce single data )\n\n stop_rx and stop_tx - If you do not need them, then do not connect them\n \"\"\"\n\n NUM_STAGES = len(stage_enable)\n\n if (stop_rx == None):\n stop_rx = Signal(intbv(0)[NUM_STAGES:])\n\n if (stop_tx == None):\n stop_tx = Signal(intbv(0)[NUM_STAGES:])\n\n assert (len(stop_rx)==NUM_STAGES), \"pipeline_control: expects len(stop_rx)=len(stage_enable), but len(stop_rx)={} len(stage_enable)={}\".format(len(stop_rx),NUM_STAGES)\n assert (len(stop_tx)==NUM_STAGES), \"pipeline_control: expects len(stop_tx)=len(stage_enable), but len(stop_tx)={} len(stage_enable)={}\".format(len(stop_tx),NUM_STAGES)\n\n rdy = [Signal(bool(0)) for _ in range(NUM_STAGES+1)]\n vld = [Signal(bool(0)) for _ in range(NUM_STAGES+1)]\n BC = NUM_STAGES*[False]\n en = [Signal(bool(0)) for _ in range(NUM_STAGES)]\n stop_rx_s = [Signal(bool(0)) for _ in range(NUM_STAGES)]\n stop_tx_s = [Signal(bool(0)) for _ in range(NUM_STAGES)]\n\n rdy[0] = rx_rdy\n vld[0] = rx_vld\n rdy[-1] = tx_rdy\n vld[-1] = tx_vld\n BC[-1] = True\n\n stg = [None for _ in range(NUM_STAGES)]\n\n for i in range(NUM_STAGES):\n stg[i] = _stage_ctrl(rst = rst,\n clk = clk,\n rx_rdy = rdy[i],\n rx_vld = vld[i],\n tx_rdy = rdy[i+1],\n tx_vld = vld[i+1],\n stage_en = en[i],\n stop_rx = stop_rx_s[i],\n stop_tx = stop_tx_s[i],\n BC = BC[i])\n\n x = en[0] if NUM_STAGES==1 else ConcatSignal(*reversed(en))\n\n @always_comb\n def _comb():\n stage_enable.next = x\n for i in range(NUM_STAGES):\n stop_rx_s[i].next = stop_rx[i]\n stop_tx_s[i].next = stop_tx[i]\n\n return instances()",
"def super_pipe(self):\n \"\"\"\n Creates a mechanism for us to internally bind two different\n operations together in a shared pipeline on the class.\n This will temporarily set self._pipe to be this new pipeline,\n during this context and then when it leaves the context\n reset self._pipe to its original value.\n\n Example:\n def get_set(self, key, val)\n with self.super_pipe as pipe:\n res = self.get(key)\n self.set(key, val)\n return res\n\n This will have the effect of using only one network round trip if no\n pipeline was passed to the constructor.\n\n This method is still considered experimental and we are working out\n the details, so don't use it unless you feel confident you have a\n legitimate use-case for using this.\n \"\"\"\n orig_pipe = self._pipe\n\n def exit_handler():\n self._pipe = orig_pipe\n\n self._pipe = autoexec(orig_pipe, name=self.connection,\n exit_handler=exit_handler)\n\n return self._pipe",
"async def pipeline(self, transaction=True, shard_hint=None):\n \"\"\"\n Return a new pipeline object that can queue multiple commands for\n later execution. ``transaction`` indicates whether all commands\n should be executed atomically. Apart from making a group of operations\n atomic, pipelines are useful for reducing the back-and-forth overhead\n between the client and server.\n \"\"\"\n from aredis.pipeline import StrictPipeline\n pipeline = StrictPipeline(self.connection_pool, self.response_callbacks,\n transaction, shard_hint)\n await pipeline.reset()\n return pipeline",
"function (action, payload, args, immediate) {\n var instruction = this.pool.create(action, payload, args);\n\n // based on whether the immediate flag is set, add to the top or bottom of the instruction queue.\n (immediate ? this.pool.unshift : this.pool.push)(instruction);\n\n return instruction;\n }",
"def runController(callable_, *args, **kargs):\n \"\"\"Callable greenlet implementing controller logic.\"\"\"\n global execQueue\n\n # initialize and run root future\n rootId = (-1, 0)\n\n # initialise queue\n if execQueue is None:\n execQueue = FutureQueue()\n\n sys.excepthook = advertiseBrokerWorkerDown\n\n if scoop.DEBUG:\n from scoop import _debug\n _debug.redirectSTDOUTtoDebugFile()\n\n # TODO: Make that a function\n # Wait until we received the main module if we are a headless slave\n headless = scoop.CONFIGURATION.get(\"headless\", False)\n if not scoop.MAIN_MODULE:\n # If we're not the origin and still don't have our main_module,\n # wait for it and then import it as module __main___\n main = scoop.shared.getConst('__MAIN_MODULE__', timeout=float('inf'))\n directory_name = tempfile.mkdtemp()\n os.chdir(directory_name)\n scoop.MAIN_MODULE = main.writeFile(directory_name)\n from .bootstrap.__main__ import Bootstrap as SCOOPBootstrap\n newModule = SCOOPBootstrap.setupEnvironment()\n sys.modules['__main__'] = newModule\n elif scoop.IS_ORIGIN and headless and scoop.MAIN_MODULE:\n # We're the origin, share our main_module\n scoop.shared.setConst(\n __MAIN_MODULE__=scoop.encapsulation.ExternalEncapsulation(\n scoop.MAIN_MODULE,\n )\n )\n # TODO: use modulefinder to share every local dependency of\n # main module\n\n # launch future if origin or try to pickup a future if slave worker\n if scoop.IS_ORIGIN:\n future = Future(rootId, callable_, *args, **kargs)\n else:\n future = execQueue.pop()\n\n future.greenlet = greenlet.greenlet(runFuture)\n future = future._switch(future)\n\n if scoop.DEBUG:\n lastDebugTs = time.time()\n\n while not scoop.IS_ORIGIN or future.parentId != rootId or not future._ended():\n if scoop.DEBUG and time.time() - lastDebugTs > scoop.TIME_BETWEEN_PARTIALDEBUG:\n _debug.writeWorkerDebug(\n debug_stats,\n QueueLength,\n \"debug/partial-{0}\".format(\n round(time.time(), -1)\n )\n )\n lastDebugTs = time.time()\n # process future\n if future._ended():\n # future is finished\n if future.id[0] != scoop.worker:\n # future is not local\n execQueue.sendResult(future)\n future = execQueue.pop()\n else:\n # future is local, parent is waiting\n if future.index is not None:\n try:\n parent = futureDict[future.parentId]\n except KeyError:\n # Job has no parent here (probably children restart)\n future = execQueue.pop()\n else:\n if parent.exceptionValue is None:\n future = parent._switch(future)\n else:\n future = execQueue.pop()\n else:\n future = execQueue.pop()\n else:\n # future is in progress; run next future from pending execution queue.\n future = execQueue.pop()\n\n if not future._ended() and future.greenlet is None:\n # initialize if the future hasn't started\n future.greenlet = greenlet.greenlet(runFuture)\n future = future._switch(future)\n\n execQueue.shutdown()\n if future.exceptionValue:\n print(future.exceptionTraceback)\n sys.exit(1)\n return future.resultValue"
] |
[
0.8144322037696838,
0.6688690781593323,
0.6525091528892517,
0.6471115350723267,
0.6442935466766357,
0.6442053914070129,
0.6432787179946899,
0.6421696543693542,
0.6420442461967468,
0.6401344537734985,
0.6352636218070984,
0.6345563530921936
] |
Collect all bases and organize into parent/child mappings.
|
def _build_mappings(
self, classes: Sequence[type]
) -> Tuple[Mapping[type, Sequence[type]], Mapping[type, Sequence[type]]]:
"""
Collect all bases and organize into parent/child mappings.
"""
parents_to_children: MutableMapping[type, Set[type]] = {}
children_to_parents: MutableMapping[type, Set[type]] = {}
visited_classes: Set[type] = set()
class_stack = list(classes)
while class_stack:
class_ = class_stack.pop()
if class_ in visited_classes:
continue
visited_classes.add(class_)
for base in class_.__bases__:
if base not in visited_classes:
class_stack.append(base)
parents_to_children.setdefault(base, set()).add(class_)
children_to_parents.setdefault(class_, set()).add(base)
sorted_parents_to_children: MutableMapping[
type, List[type]
] = collections.OrderedDict()
for parent, children in sorted(
parents_to_children.items(), key=lambda x: (x[0].__module__, x[0].__name__)
):
sorted_parents_to_children[parent] = sorted(
children, key=lambda x: (x.__module__, x.__name__)
)
sorted_children_to_parents: MutableMapping[
type, List[type]
] = collections.OrderedDict()
for child, parents in sorted(
children_to_parents.items(), key=lambda x: (x[0].__module__, x[0].__name__)
):
sorted_children_to_parents[child] = sorted(
parents, key=lambda x: (x.__module__, x.__name__)
)
return sorted_parents_to_children, sorted_children_to_parents
|
[
"def recursive_bases(self):\n \"\"\"list of all :class:`base classes <hierarchy_info_t>`\"\"\"\n if self._recursive_bases is None:\n to_go = self.bases[:]\n all_bases = []\n while to_go:\n base = to_go.pop()\n if base not in all_bases:\n all_bases.append(base)\n to_go.extend(base.related_class.bases)\n self._recursive_bases = all_bases\n return self._recursive_bases",
"def _determinebase_address(self):\n \"\"\"\n The basic idea is simple: start from a specific point, try to construct\n functions as much as we can, and maintain a function distribution graph\n and a call graph simultaneously. Repeat searching until we come to the\n end that there is no new function to be found.\n A function should start with:\n # some addresses that a call exit leads to, or\n # certain instructions. They are recoreded in SimArch.\n\n For a better performance, instead of blindly scanning the entire process\n space, we first try to search for instruction patterns that a function\n may start with, and start scanning at those positions. Then we try to\n decode anything that is left.\n \"\"\"\n\n traced_address = set()\n self.functions = set()\n self.call_map = networkx.DiGraph()\n self.cfg = networkx.DiGraph()\n initial_state = self.project.factory.blank_state(mode=\"fastpath\")\n initial_options = initial_state.options - { o.TRACK_CONSTRAINTS } - o.refs\n initial_options |= { o.SUPER_FASTPATH }\n # initial_options.remove(o.COW_STATES)\n initial_state.options = initial_options\n # Sadly, not all calls to functions are explicitly made by call\n # instruction - they could be a jmp or b, or something else. So we\n # should record all exits from a single function, and then add\n # necessary calling edges in our call map during the post-processing\n # phase.\n function_exits = defaultdict(set)\n\n dump_file_prefix = self.project.filename\n\n if self._pickle_intermediate_results and \\\n os.path.exists(dump_file_prefix + \"_indirect_jumps.angr\"):\n l.debug(\"Loading existing intermediate results.\")\n self._indirect_jumps = pickle.load(open(dump_file_prefix + \"_indirect_jumps.angr\", \"rb\"))\n self.cfg = pickle.load(open(dump_file_prefix + \"_coercecfg.angr\", \"rb\"))\n self._unassured_functions = pickle.load(open(dump_file_prefix + \"_unassured_functions.angr\", \"rb\"))\n else:\n # Performance boost :-)\n # Scan for existing function prologues\n self._scan_function_prologues(traced_address, function_exits, initial_state)\n\n if self._pickle_intermediate_results:\n l.debug(\"Dumping intermediate results.\")\n pickle.dump(self._indirect_jumps, open(dump_file_prefix + \"_indirect_jumps.angr\", \"wb\"), -1)\n pickle.dump(self.cfg, open(dump_file_prefix + \"_coercecfg.angr\", \"wb\"), -1)\n pickle.dump(self._unassured_functions, open(dump_file_prefix + \"_unassured_functions.angr\", \"wb\"), -1)\n\n if len(self._indirect_jumps):\n # We got some indirect jumps!\n # Gotta execute each basic block and see where it wants to jump to\n function_starts = self._process_indirect_jumps()\n\n self.base_address = self._solve_forbase_address(function_starts, self._unassured_functions)\n\n l.info(\"Base address should be 0x%x\", self.base_address)\n\n else:\n l.debug(\"No indirect jumps are found. 
We switch to the slowpath mode.\")\n # TODO: Slowpath mode...\n while True:\n next_addr = self._get_next_code_addr(initial_state)\n percentage = self._seg_list.occupied_size * 100.0 / (self._valid_memory_region_size)\n l.info(\"Analyzing %xh, progress %0.04f%%\", next_addr, percentage)\n if next_addr is None:\n break\n\n self.call_map.add_node(next_addr)\n\n self._scan_code(traced_address, function_exits, initial_state, next_addr)\n\n # Post-processing: Map those calls that are not made by call/blr\n # instructions to their targets in our map\n for src, s in function_exits.items():\n if src in self.call_map:\n for target in s:\n if target in self.call_map:\n self.call_map.add_edge(src, target)\n\n nodes = sorted(self.call_map.nodes())\n for i in range(len(nodes) - 1):\n if nodes[i] >= nodes[i + 1] - 4:\n for dst in self.call_map.successors(nodes[i + 1]):\n self.call_map.add_edge(nodes[i], dst)\n for src in self.call_map.predecessors(nodes[i + 1]):\n self.call_map.add_edge(src, nodes[i])\n self.call_map.remove_node(nodes[i + 1])\n\n l.debug(\"Construction finished.\")",
"def get_bases(definition_dict, loader):\n \"\"\"Collect dependencies.\n\n \"\"\"\n bases = definition_dict.get('bases', ())\n if bases:\n bases = (loader.get_comp_dict(required_version=SPEC_VERSION_TUPLE[0],\n **b)\n for b in bases)\n return SimpleChainmap(definition_dict, *bases)\n else:\n return definition_dict",
"def build_from_bases(bases, classdict, attr, attr_dict):\n \"\"\"Helper function to build private HasProperties attributes\"\"\"\n output = OrderedDict()\n output_keys = set()\n all_bases = []\n # Go through the bases from furthest to nearest ancestor\n for base in reversed(bases):\n # Only keep the items that are still defined on the bases\n if base is not object and isinstance(base, PropertyMetaclass):\n output_keys = output_keys.union(getattr(base, attr))\n # Collect all bases so we ensure overridden items are assigned\n # in the correct order\n for item in reversed(base.__mro__):\n if item is object or not isinstance(item, PropertyMetaclass):\n continue\n if item not in all_bases:\n all_bases.append(item)\n # Update the items in reverse MRO order; only keep those that are\n # defined on the bases\n for base in all_bases:\n for key, val in iteritems(getattr(base, attr)):\n if key in base.__dict__ and key in output_keys:\n output.update({key: val})\n # Remove all items that were overridden by this class; this is\n # potentially a superset of the items added back in the next step.\n for key in classdict:\n if key in output:\n output.pop(key)\n # Update the items with those defined on this class\n output.update(attr_dict)\n return output",
"def _get_bases(cls, ab):\n \"\"\"\n Start Bases & End Bases\n :param ab: at bat object(type:Beautifulsoup)\n :param attribute_name: attribute name\n :return: start base, end base\n \"\"\"\n start_bases, end_bases = [], []\n for base in ('1B', '2B', '3B'):\n if ab.find('runner', start=base):\n start_bases.append(base[0:1])\n else:\n start_bases.append('_')\n if ab.find('runner', end=base):\n end_bases.append(base[0:1])\n else:\n end_bases.append('_')\n return ''.join(start_bases), ''.join(end_bases)",
"def sorted_bases(bases):\n '''If a class subclasses each class in bases (in that order), then\n this function returns the would-be python mro for the created class,\n minus <object>.\n '''\n ret = []\n\n for base in bases:\n # lst = [super(base), super(super(base)), ..., highest_base]\n lst = _bases(base)\n\n if not ret:\n ret = lst\n elif not any(b in ret for b in lst):\n ret += lst\n else:\n buf = []\n for b in lst:\n if b in ret:\n if buf:\n ret = graft(ret, buf, ret.index(b))\n buf = []\n else:\n buf.append(b)\n if buf:\n ret += buf\n\n return ret",
"void initBaseCoord() {\n baseCoords = new double[dimension + 1][];\n for (int i = 0; i < baseCoords.length; i++) {\n baseCoords[i] = lowerBound.clone();\n if (i > 0) {\n baseCoords[i][i - 1] = upperBound[i - 1];\n }\n }\n }",
"function (base) {\n if (this.union === undefined) {\n if (base === undefined) {\n return this.baseURI;\n } else {\n this.baseURI = base;\n return this;\n }\n } else if (base === undefined) {\n return this.union[0].base();\n } else {\n $.each(this.union, function (i, databank) {\n databank.base(base);\n });\n return this;\n }\n }",
"private void initBase(double[] lowerBound, double[] upperBound, boolean extendBound) {\n base = new Base(lowerBound, upperBound, extendBound);\n backupBase = base;\n baseGrid = new BaseGrid(base);\n }",
"def unify_basis(self, keys=None, basis=None):\n \"\"\"\n Give everything, or everything in the list of keys, the same basis.\n If you don't provide a basis, welly will try to get one using\n ``survey_basis()``.\n\n Args:\n basis (ndarray): A basis: the regularly sampled depths at which\n you want the samples.\n keys (list): List of strings: the keys of the data items to\n unify, if not all of them.\n\n Returns:\n None. Works in place.\n \"\"\"\n if keys is None:\n keys = [k for k, v in self.data.items() if isinstance(v, Curve)]\n else:\n keys = utils.flatten_list(keys)\n\n if basis is None:\n basis = self.survey_basis(keys=keys)\n if basis is None:\n m = \"No basis was provided and welly could not retrieve common basis.\"\n raise WellError(m)\n\n for k in keys:\n if keys and (k not in keys):\n continue\n try: # To treat as a curve.\n self.data[k] = self.data[k].to_basis(basis)\n except: # It's probably a striplog.\n continue\n\n return",
"def bases\n file = \"/proc/#{@pid}/maps\"\n stat = MemoryIO::Util.file_permission(file)\n return {} unless stat && stat.readable?\n\n maps = ::IO.binread(file).split(\"\\n\").map do |line|\n # 7f76515cf000-7f76515da000 r-xp 00000000 fd:01 29360257 /lib/x86_64-linux-gnu/libnss_files-2.24.so\n addr, _perm, _offset, _dev, _inode, pathname = line.strip.split(' ', 6)\n next nil if pathname.nil?\n\n addr = addr.to_i(16)\n pathname = pathname[1..-2] if pathname =~ /^\\[.+\\]$/\n pathname = ::File.basename(pathname)\n [MemoryIO::Util.trim_libname(pathname).to_sym, addr]\n end\n maps.compact.reverse.to_h\n end",
"def build(self, start, end, symbols=None):\n \"\"\"Return the list of basic blocks.\n\n :int start: Start address of the disassembling process.\n :int end: End address of the disassembling process.\n\n \"\"\"\n symbols = {} if not symbols else symbols\n\n # First pass: Recover BBs.\n bbs = self._recover_bbs(start, end, symbols)\n\n # Second pass: Split overlapping basic blocks introduced by back edges.\n bbs = self._split_bbs(bbs, symbols)\n\n # Third pass: Extract call targets for further analysis.\n call_targets = self._extract_call_targets(bbs)\n\n return bbs, call_targets"
] |
[
0.7023827433586121,
0.6952263116836548,
0.6884115934371948,
0.6778467893600464,
0.6761168241500854,
0.6727396845817566,
0.6723271012306213,
0.6719276309013367,
0.6717371940612793,
0.6709029674530029,
0.6694896221160889,
0.6686638593673706
] |
Collect all classes defined in/under ``package_paths``.
|
def _collect_classes(
self, package_paths: Sequence[str], recurse_subpackages: bool = True
) -> Sequence[type]:
"""
Collect all classes defined in/under ``package_paths``.
"""
import uqbar.apis
classes = []
initial_source_paths: Set[str] = set()
# Graph source paths and classes
for path in package_paths:
try:
module = importlib.import_module(path)
if hasattr(module, "__path__"):
initial_source_paths.update(getattr(module, "__path__"))
else:
initial_source_paths.add(module.__file__)
except ModuleNotFoundError:
path, _, class_name = path.rpartition(".")
module = importlib.import_module(path)
classes.append(getattr(module, class_name))
# Iterate source paths
for source_path in uqbar.apis.collect_source_paths(
initial_source_paths, recurse_subpackages=recurse_subpackages
):
package_path = uqbar.apis.source_path_to_package_path(source_path)
module = importlib.import_module(package_path)
# Grab any defined classes
for name in dir(module):
if name.startswith("_"):
continue
object_ = getattr(module, name)
if isinstance(object_, type) and object_.__module__ == module.__name__:
classes.append(object_)
return sorted(classes, key=lambda x: (x.__module__, x.__name__))
|
[
"def collect_dependency_paths(package_name):\n \"\"\"\n TODO docstrings\n \"\"\"\n deps = []\n try:\n dist = pkg_resources.get_distribution(package_name)\n except (pkg_resources.DistributionNotFound, ValueError):\n message = \"Distribution '{}' not found.\".format(package_name)\n raise RequirementNotFoundError(message)\n\n if dist.has_metadata('top_level.txt'):\n for line in dist.get_metadata('top_level.txt').split():\n # do not consider subpackages (e.g. the form 'package/subpackage')\n if not os.path.split(line)[0]:\n pkg = os.path.join(dist.location, line)\n # handle single module packages\n if not os.path.isdir(pkg) and os.path.exists(pkg+'.py'):\n pkg += '.py'\n deps.append(pkg)\n\n for req in dist.requires():\n deps.extend(collect_dependency_paths(req.project_name))\n\n return deps",
"def load_classes(package_str):\n '''Load all classes from modules of a given `package_str`. All class instances are stored in a case-insensitive `dict`\n and returned. If a package doesn't contain any class `None` is returned'''\n\n _logger.debug('Loading all modules from %s', package_str)\n package = importlib.import_module(package_str)\n package_path = package.__path__\n _logger.debug('Searching for modules in package %s (%s)', package_str, package_path)\n for _, name, ispkg in pkgutil.iter_modules(package_path, package_str + \".\"):\n if not ispkg:\n _logger.debug('Found module: %s', name)\n module = importlib.import_module(name)\n if hasattr(module, CLASS_NAME_ATTR):\n class_name = getattr(module, CLASS_NAME_ATTR)\n _logger.debug('Found class: %s', class_name)\n clasz = getattr(module, class_name)\n\n if package_str not in _dynamo_cache:\n _dynamo_cache[package_str] = CaseInsensitiveDict()\n \n if class_name not in _dynamo_cache[package_str]:\n _dynamo_cache[package_str][class_name] = clasz\n _logger.debug('Correctly loaded class: %s from: \"%s\"', class_name, package_str)\n else:\n _logger.warning('Already loaded class: %s from: \"%s\"', class_name, package_str)\n else:\n _logger.warning('Module inside %s does not contain required attribute: %s', package_str, CLASS_NAME_ATTR)\n else:\n _logger.warning('Ignoring package: %s', name)\n\n if package_str in _dynamo_cache:\n return _dynamo_cache[package_str]\n else:\n return None",
"def discover(package, cls_match_func):\n \"\"\"Returns a set of classes in the directory matched by cls_match_func\n\n Args:\n path - A Python package\n cls_match_func - Function taking a class and returning true if the\n class is to be included in the output.\n \"\"\"\n matched_classes = set()\n\n for _, module_name, _ in pkgutil.walk_packages(\n package.__path__,\n prefix=package.__name__ + '.',\n ):\n module = __import__(module_name, fromlist=[str('__trash')], level=0)\n\n # Check all the classes in that module\n for _, imported_class in inspect.getmembers(module, inspect.isclass):\n # Don't include things that are only there due to a side-effect of\n # importing\n if imported_class.__module__ != module.__name__:\n continue\n\n if cls_match_func(imported_class):\n matched_classes.add(imported_class)\n\n return matched_classes",
"def load_classes(cls, fail_silently=True):\n \"\"\"Load all the classes for a plugin.\n\n Produces a sequence containing the identifiers and their corresponding\n classes for all of the available instances of this plugin.\n\n fail_silently causes the code to simply log warnings if a\n plugin cannot import. The goal is to be able to use part of\n libraries from an XBlock (and thus have it installed), even if\n the overall XBlock cannot be used (e.g. depends on Django in a\n non-Django application). There is disagreement about whether\n this is a good idea, or whether we should see failures early\n (e.g. on startup or first page load), and in what\n contexts. Hence, the flag.\n \"\"\"\n all_classes = itertools.chain(\n pkg_resources.iter_entry_points(cls.entry_point),\n (entry_point for identifier, entry_point in cls.extra_entry_points),\n )\n for class_ in all_classes:\n try:\n yield (class_.name, cls._load_class_entry_point(class_))\n except Exception: # pylint: disable=broad-except\n if fail_silently:\n log.warning('Unable to load %s %r', cls.__name__, class_.name, exc_info=True)\n else:\n raise",
"def __get_extra_extension_classes(paths):\n \"\"\"\n Banana banana\n \"\"\"\n extra_classes = []\n wset = pkg_resources.WorkingSet([])\n distributions, _ = wset.find_plugins(pkg_resources.Environment(paths))\n\n for dist in distributions:\n sys.path.append(dist.location)\n wset.add(dist)\n\n for entry_point in wset.iter_entry_points(group='hotdoc.extensions',\n name='get_extension_classes'):\n try:\n activation_function = entry_point.load()\n classes = activation_function()\n # pylint: disable=broad-except\n except Exception as exc:\n info(\"Failed to load %s %s\" % (entry_point.module_name, exc))\n debug(traceback.format_exc())\n continue\n\n for klass in classes:\n extra_classes.append(klass)\n\n return extra_classes",
"def collect_subclasses(self): # type: () -> Dict[str, List[Tuple[str, str]]]\n \"\"\"\n Collect all subclasses of user-defined base classes from project.\n :return: Dictionary from module name to list of tuples.\n First element of tuple is model name and second is alias.\n Currently we set alias equal to model name,\n but in future functionality of aliasing subclasses can be added.\n \"\"\"\n result = {} # type: Dict[str, List[Tuple[str, str]]]\n for loader, module_name, is_pkg in walk_packages(path=[settings.BASE_DIR]):\n subclasses_from_module = self._collect_classes_from_module(module_name)\n if subclasses_from_module:\n result[module_name] = subclasses_from_module\n return result",
"public static Set<Class<?>> getAllClasses(final String packagePath)\n\t\tthrows ClassNotFoundException, IOException, URISyntaxException\n\t{\n\t\treturn getAllAnnotatedClasses(packagePath, null);\n\t}",
"def collect(basepath, exclude=None, processPlugins=True):\n \"\"\"\n Collects all the packages associated with the inputted filepath.\n \n :param module | <module>\n \n :return ([<str> pkg, ..], [(<str> path, <str> relpath), ..] data)\n \"\"\"\n if exclude is None:\n exclude = ['.py', '.pyc', '.pyo', '.css', '.exe']\n\n imports = []\n datas = []\n\n # walk the folder structure looking for all packages and data files\n basename = os.path.basename(basepath)\n basepath = os.path.abspath(basepath)\n baselen = len(basepath) - len(basename)\n\n plugfiles = []\n\n for root, folders, files in os.walk(basepath):\n if '.svn' in root or '.git' in root:\n continue\n\n # mark the plugins file for load\n plugdata = None\n if processPlugins and '__plugins__.py' in files:\n filename = os.path.join(root, '__plugins__.py')\n package = projex.packageFromPath(filename) + '.__plugins__'\n pkgpath = projex.packageRootPath(filename)\n\n if pkgpath not in sys.path:\n sys.path.insert(0, pkgpath)\n\n # import the plugins module\n __import__(package)\n pkg = sys.modules[package]\n\n recurse = getattr(pkg, '__recurse__', False)\n plugdata = {'recurse': recurse,\n 'packages': [],\n 'path': root}\n\n plugfiles.append(plugdata)\n\n # look for any recursion plugins\n else:\n for data in plugfiles:\n if data['recurse'] and root.startswith(data['path']):\n plugdata = data\n break\n\n if plugdata is not None:\n packages = plugdata['packages']\n\n # include package plugins\n for folder in folders:\n pkgpath = os.path.join(root, folder, '__init__.py')\n if os.path.exists(pkgpath):\n packages.append(projex.packageFromPath(pkgpath))\n\n for file_ in files:\n module, ext = os.path.splitext(file_)\n\n # look for python modules\n if ext == '.py':\n package_path = projex.packageFromPath(os.path.join(root, file_))\n if not package_path:\n continue\n\n if module != '__init__':\n package_path += '.' + module\n\n imports.append(package_path)\n\n # test to see if this is a plugin file\n if plugdata is not None and module not in ('__init__',\n '__plugins__'):\n plugdata['packages'].append(package_path)\n\n # look for data\n elif ext not in exclude:\n src = os.path.join(root, file_)\n targ = os.path.join(root[baselen:])\n datas.append((src, targ))\n\n # save the plugin information\n for plugdata in plugfiles:\n fname = os.path.join(plugdata['path'], '__plugins__.py')\n packages = plugdata['packages']\n\n plugs = ',\\n'.join(map(lambda x: \"r'{0}'\".format(x), packages))\n data = [\n '__recurse__ = {0}'.format(plugdata['recurse']),\n '__toc__ = [{0}]'.format(plugs)\n ]\n\n # write the data to the system\n f = open(fname, 'w')\n f.write('\\n'.join(data))\n f.close()\n\n return imports, datas",
"def classes(self) -> Iterator[str]:\n \"\"\"Yield the name of all classes discovered in the path map.\"\"\"\n yield from (\n c[:-6]\n for c in self.path_map.keys() if c.endswith('.class')\n )",
"def get_classes(self):\n \"\"\"Finds classes in file\n\n :return: list of top-level classes\n \"\"\"\n instances = self._get_instances(ast.ClassDef)\n instances = [\n PyClass(instance, self.package)\n for instance in instances\n ]\n return instances",
"def get_orm_classes(path):\n \"\"\"this will return prom.Orm classes found in the given path (classpath or modulepath)\"\"\"\n ret = set()\n try:\n m = importlib.import_module(path)\n\n except ImportError:\n # we have a classpath\n m, klass = get_objects(path)\n if issubclass(klass, Orm):\n ret.add(klass)\n\n else:\n ret.update(get_subclasses(m, Orm))\n\n return ret",
"def get_packaged_files(package_name):\n \"\"\" Collect relative paths to all files which have already been packaged \"\"\"\n if not os.path.isdir('dist'):\n return []\n return [os.path.join('dist', filename) for filename in os.listdir('dist')]"
] |
[
0.7355327606201172,
0.7090335488319397,
0.7000157833099365,
0.700005829334259,
0.6999524831771851,
0.6933577656745911,
0.689599871635437,
0.6882212162017822,
0.6865902543067932,
0.6856491565704346,
0.6845882534980774,
0.6808279752731323
] |
Return a tuple for authenticating a user
If not successful raise ``AgileError``.
|
def get_auth():
"""Return a tuple for authenticating a user
If not successful raise ``AgileError``.
"""
auth = get_auth_from_env()
if auth[0] and auth[1]:
return auth
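    # fall back to the user section of ~/.gitconfig for credentials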
home = os.path.expanduser("~")
config = os.path.join(home, '.gitconfig')
if not os.path.isfile(config):
raise GithubException('No .gitconfig available')
parser = configparser.ConfigParser()
parser.read(config)
if 'user' in parser:
user = parser['user']
if 'username' not in user:
raise GithubException('Specify username in %s user '
'section' % config)
if 'token' not in user:
raise GithubException('Specify token in %s user section'
% config)
return user['username'], user['token']
else:
raise GithubException('No user section in %s' % config)
|
[
"def authenticate(self, request):\n \"\"\"\n Returns a `User` if a correct access token has been supplied\n in the Authorization header. Otherwise returns `None`.\n \"\"\"\n auth = get_authorization_header(request).split()\n\n if not auth or auth[0].lower() != b'bearer':\n return None\n\n if len(auth) == 1:\n msg = 'Invalid authorization header. No credentials provided.'\n raise exceptions.AuthenticationFailed(msg)\n elif len(auth) > 2:\n msg = 'Invalid authorization header. Access token should not contain spaces.'\n raise exceptions.AuthenticationFailed(msg)\n\n # Authenticate the user\n # The AdfsAuthCodeBackend authentication backend will notice the \"access_token\" parameter\n # and skip the request for an access token using the authorization code\n user = authenticate(access_token=auth[1])\n\n if user is None:\n raise exceptions.AuthenticationFailed('Invalid access token.')\n\n if not user.is_active:\n raise exceptions.AuthenticationFailed('User inactive or deleted.')\n\n return user, auth[1]",
"def authenticate(self, request):\n \"\"\"\n Returns two-tuple of (user, token) if authentication succeeds,\n or None otherwise.\n \"\"\"\n auth_header = get_authorization_header(request).decode(HTTP_HEADER_ENCODING)\n auth = auth_header.split()\n\n if not auth or auth[0].lower() != 'bearer':\n return None\n\n if len(auth) == 1:\n msg = 'Invalid token header. No backend provided.'\n raise exceptions.AuthenticationFailed(msg)\n elif len(auth) == 2:\n msg = 'Invalid token header. No credentials provided.'\n raise exceptions.AuthenticationFailed(msg)\n elif len(auth) > 3:\n msg = 'Invalid token header. Token string should not contain spaces.'\n raise exceptions.AuthenticationFailed(msg)\n\n token = auth[2]\n backend = auth[1]\n\n strategy = load_strategy(request=request)\n\n try:\n backend = load_backend(strategy, backend, reverse(NAMESPACE + \":complete\", args=(backend,)))\n except MissingBackend:\n msg = 'Invalid token header. Invalid backend.'\n raise exceptions.AuthenticationFailed(msg)\n\n try:\n user = backend.do_auth(access_token=token)\n except requests.HTTPError as e:\n msg = e.response.text\n raise exceptions.AuthenticationFailed(msg)\n\n if not user:\n msg = 'Bad credentials.'\n raise exceptions.AuthenticationFailed(msg)\n return user, token",
"def authenticate(org):\n \"\"\"\n Authenticate with GitHub via SSH if possible\n Otherwise authenticate via HTTPS\n Returns an authenticated User\n \"\"\"\n with ProgressBar(_(\"Authenticating\")) as progress_bar:\n user = _authenticate_ssh(org)\n progress_bar.stop()\n if user is None:\n # SSH auth failed, fallback to HTTPS\n with _authenticate_https(org) as user:\n yield user\n else:\n yield user",
"def user_auth(\n self,\n cloudflare_email=None,\n cloudflare_pass=None,\n unique_id=None\n ):\n \"\"\"\n Get user_key based on either his email and password or unique_id.\n\n :param cloudflare_email: email associated with user\n :type cloudflare_email: str\n :param cloudflare_pass: pass associated with user\n :type cloudflare_pass: str\n :param unique_id: unique id associated with user\n :type unique_id: str\n\n :returns:\n :rtype: dict\n \"\"\"\n if not (cloudflare_email and cloudflare_pass) and not unique_id:\n raise KeyError(\n 'Either cloudflare_email and cloudflare_pass or unique_id must be present')\n params = {'act': 'user_auth'}\n if cloudflare_email and cloudflare_pass:\n params['cloudflare_email'] = cloudflare_email\n params['cloudflare_pass'] = cloudflare_pass\n if unique_id:\n params['unique_id'] = unique_id\n\n return self._request(params)",
"def authenticate(self, request, **credentials):\n \"\"\"Only authenticates, does not actually login. See `login`\"\"\"\n from allauth.account.auth_backends import AuthenticationBackend\n\n self.pre_authenticate(request, **credentials)\n AuthenticationBackend.unstash_authenticated_user()\n user = authenticate(request, **credentials)\n alt_user = AuthenticationBackend.unstash_authenticated_user()\n user = user or alt_user\n if user and app_settings.LOGIN_ATTEMPTS_LIMIT:\n cache_key = self._get_login_attempts_cache_key(\n request, **credentials)\n cache.delete(cache_key)\n else:\n self.authentication_failed(request, **credentials)\n return user",
"def _stash_user(cls, user):\n \"\"\"Now, be aware, the following is quite ugly, let me explain:\n\n Even if the user credentials match, the authentication can fail because\n Django's default ModelBackend calls user_can_authenticate(), which\n checks `is_active`. Now, earlier versions of allauth did not do this\n and simply returned the user as authenticated, even in case of\n `is_active=False`. For allauth scope, this does not pose a problem, as\n these users are properly redirected to an account inactive page.\n\n This does pose a problem when the allauth backend is used in a\n different context where allauth is not responsible for the login. Then,\n by not checking on `user_can_authenticate()` users will allow to become\n authenticated whereas according to Django logic this should not be\n allowed.\n\n In order to preserve the allauth behavior while respecting Django's\n logic, we stash a user for which the password check succeeded but\n `user_can_authenticate()` failed. In the allauth authentication logic,\n we can then unstash this user and proceed pointing the user to the\n account inactive page.\n \"\"\"\n global _stash\n ret = getattr(_stash, 'user', None)\n _stash.user = user\n return ret",
"def authenticate(self, request):\n \"\"\"\n Authenticate the request and return a tuple of (user, token) or None\n if there was no authentication attempt.\n \"\"\"\n access_token = self.get_access_token(request)\n\n if not access_token:\n return None\n\n try:\n user = self.backend.get_or_create_user(access_token, None, None)\n except HTTPError as exc:\n resp = exc.response\n\n # if the oidc provider returns 401, it means the token is invalid.\n # in that case, we want to return the upstream error message (which\n # we can get from the www-authentication header) in the response.\n if resp.status_code == 401 and 'www-authenticate' in resp.headers:\n data = parse_www_authenticate_header(resp.headers['www-authenticate'])\n raise exceptions.AuthenticationFailed(data['error_description'])\n\n # for all other http errors, just re-raise the exception.\n raise\n except SuspiciousOperation as exc:\n LOGGER.info('Login failed: %s', exc)\n raise exceptions.AuthenticationFailed('Login failed')\n\n if not user:\n msg = 'Login failed: No user found for the given access token.'\n raise exceptions.AuthenticationFailed(msg)\n\n return user, access_token",
"def auth(self):\n \"\"\" HTTP authentication data as a (user, password) tuple. This\n implementation currently supports basic (not digest) authentication\n only. If the authentication happened at a higher level (e.g. in the\n front web-server or a middleware), the password field is None, but\n the user field is looked up from the ``REMOTE_USER`` environ\n variable. On any errors, None is returned. \"\"\"\n basic = parse_auth(self.environ.get('HTTP_AUTHORIZATION',''))\n if basic: return basic\n ruser = self.environ.get('REMOTE_USER')\n if ruser: return (ruser, None)\n return None",
"def authenticate(self, request: AxesHttpRequest, username: str = None, password: str = None, **kwargs: dict):\n \"\"\"\n Checks user lockout status and raise a PermissionDenied if user is not allowed to log in.\n\n This method interrupts the login flow and inserts error message directly to the\n ``response_context`` attribute that is supplied as a keyword argument.\n\n :keyword response_context: kwarg that will be have its ``error`` attribute updated with context.\n :raises AxesBackendRequestParameterRequired: if request parameter is not passed.\n :raises AxesBackendPermissionDenied: if user is already locked out.\n \"\"\"\n\n if request is None:\n raise AxesBackendRequestParameterRequired('AxesBackend requires a request as an argument to authenticate')\n\n credentials = get_credentials(username=username, password=password, **kwargs)\n\n if AxesProxyHandler.is_allowed(request, credentials):\n return\n\n # Locked out, don't try to authenticate, just update response_context and return.\n # Its a bit weird to pass a context and expect a response value but its nice to get a \"why\" back.\n\n error_msg = get_lockout_message()\n response_context = kwargs.get('response_context', {})\n response_context['error'] = error_msg\n\n # Raise an error that stops the authentication flows at django.contrib.auth.authenticate.\n # This error stops bubbling up at the authenticate call which catches backend PermissionDenied errors.\n # After this error is caught by authenticate it emits a signal indicating user login failed,\n # which is processed by axes.signals.log_user_login_failed which logs the attempt and raises\n # a second exception which bubbles up the middleware stack and produces a HTTP 403 Forbidden reply\n # in the axes.middleware.AxesMiddleware.process_exception middleware exception handler.\n\n raise AxesBackendPermissionDenied('AxesBackend detected that the given user is locked out')",
"def Authenticate(self, app_id, challenge_data,\n print_callback=sys.stderr.write):\n \"\"\"See base class.\"\"\"\n for authenticator in self.authenticators:\n if authenticator.IsAvailable():\n result = authenticator.Authenticate(app_id,\n challenge_data,\n print_callback)\n return result\n\n raise ValueError('No valid authenticators found')",
"def get_user_password (self):\n \"\"\"Get tuple (user, password) from configured authentication.\n Both user and password can be None.\n \"\"\"\n if self.userinfo:\n # URL itself has authentication info\n return urllib.splitpasswd(self.userinfo)\n return self.aggregate.config.get_user_password(self.url)",
"def authenticate_credentials(self, payload):\n \"\"\"\n Returns an active user that matches the payload's user id and email.\n \"\"\"\n User = get_user_model()\n username = jwt_get_username_from_payload(payload)\n\n if not username:\n msg = _('Invalid payload.')\n raise exceptions.AuthenticationFailed(msg)\n\n try:\n user = User.objects.get_by_natural_key(username)\n except User.DoesNotExist:\n msg = _('Invalid signature.')\n raise exceptions.AuthenticationFailed(msg)\n\n if not user.is_active:\n msg = _('User account is disabled.')\n raise exceptions.AuthenticationFailed(msg)\n\n return user"
] |
[
0.7139577269554138,
0.7108141779899597,
0.7068831920623779,
0.7052825093269348,
0.7042210698127747,
0.7039636373519897,
0.7035170793533325,
0.7032013535499573,
0.6986998319625854,
0.6977265477180481,
0.6957534551620483,
0.6925418972969055
] |
Function checkAndCreate
    Check if an architecture exists and create it if not
    @param key: The targeted architecture
    @param payload: The targeted architecture description
    @param osIds: The list of OS ids linked with this architecture
@return RETURN: The id of the object
|
def checkAndCreate(self, key, payload, osIds):
""" Function checkAndCreate
    Check if an architecture exists and create it if not
    @param key: The targeted architecture
    @param payload: The targeted architecture description
    @param osIds: The list of OS ids linked with this architecture
@return RETURN: The id of the object
"""
if key not in self:
self[key] = payload
oid = self[key]['id']
if not oid:
return False
#~ To be sure the OS list is good, we ensure our os are in the list
for os in self[key]['operatingsystems']:
osIds.add(os['id'])
self[key]["operatingsystem_ids"] = list(osIds)
        if len(self[key]['operatingsystems']) != len(osIds):
return False
return oid
|
[
"def checkAndCreate(self, key, payload):\n \"\"\" Function checkAndCreate\n Check if an object exists and create it if not\n\n @param key: The targeted object\n @param payload: The targeted object description\n @return RETURN: The id of the object\n \"\"\"\n if key not in self:\n if 'templates' in payload:\n templates = payload.pop('templates')\n self[key] = payload\n self.reload()\n return self[key]['id']",
"def checkAndCreate(self, key, payload):\n \"\"\" Function checkAndCreate\n Check if an object exists and create it if not\n\n @param key: The targeted object\n @param payload: The targeted object description\n @return RETURN: The id of the object\n \"\"\"\n if key not in self:\n self[key] = payload\n return self[key]['id']",
"def checkAndCreate(self, key, payload, domainId):\n \"\"\" Function checkAndCreate\n Check if a subnet exists and create it if not\n\n @param key: The targeted subnet\n @param payload: The targeted subnet description\n @param domainId: The domainId to be attached wiuth the subnet\n @return RETURN: The id of the subnet\n \"\"\"\n if key not in self:\n self[key] = payload\n oid = self[key]['id']\n if not oid:\n return False\n #~ Ensure subnet contains the domain\n subnetDomainIds = []\n for domain in self[key]['domains']:\n subnetDomainIds.append(domain['id'])\n if domainId not in subnetDomainIds:\n subnetDomainIds.append(domainId)\n self[key][\"domain_ids\"] = subnetDomainIds\n if len(self[key][\"domains\"]) is not len(subnetDomainIds):\n return False\n return oid",
"def checkAndCreate(self, key, payload,\n hostgroupConf,\n hostgroupParent,\n puppetClassesId):\n \"\"\" Function checkAndCreate\n check And Create procedure for an hostgroup\n - check the hostgroup is not existing\n - create the hostgroup\n - Add puppet classes from puppetClassesId\n - Add params from hostgroupConf\n\n @param key: The hostgroup name or ID\n @param payload: The description of the hostgroup\n @param hostgroupConf: The configuration of the host group from the\n foreman.conf\n @param hostgroupParent: The id of the parent hostgroup\n @param puppetClassesId: The dict of puppet classes ids in foreman\n @return RETURN: The ItemHostsGroup object of an host\n \"\"\"\n if key not in self:\n self[key] = payload\n oid = self[key]['id']\n if not oid:\n return False\n\n # Create Hostgroup classes\n if 'classes' in hostgroupConf.keys():\n classList = list()\n for c in hostgroupConf['classes']:\n classList.append(puppetClassesId[c])\n if not self[key].checkAndCreateClasses(classList):\n print(\"Failed in classes\")\n return False\n\n # Set params\n if 'params' in hostgroupConf.keys():\n if not self[key].checkAndCreateParams(hostgroupConf['params']):\n print(\"Failed in params\")\n return False\n\n return oid",
"def create(self, obj, payload, async=False):\n \"\"\" Function create\n Create an new object\n\n @param obj: object name ('hosts', 'puppetclasses'...)\n @param payload: the dict of the payload\n @param async: should this request be async, if true use\n return.result() to get the response\n @return RETURN: the server response\n \"\"\"\n self.url = self.base_url + obj\n self.method = 'POST'\n self.payload = json.dumps(payload)\n if async:\n self.method = 'POST(Async)'\n session = FuturesSession()\n self.resp = session.post(url=self.url, auth=self.auth,\n headers=self.headers, data=self.payload,\n cert=self.ca_cert)\n return self.resp\n else:\n self.resp = requests.post(url=self.url, auth=self.auth,\n headers=self.headers,\n data=self.payload, cert=self.ca_cert)\n return self.__process_resp__(obj)",
"def make_create_payload(**kwargs):\n \"\"\"Create payload for upload/check-upload operations.\"\"\"\n payload = {}\n # Add non-empty arguments\n for k, v in six.iteritems(kwargs):\n if v is not None:\n payload[k] = v\n\n return payload",
"def create_payload(self):\n \"\"\"Rename the payload key \"prior_id\" to \"prior\".\n\n For more information, see `Bugzilla #1238757\n <https://bugzilla.redhat.com/show_bug.cgi?id=1238757>`_.\n\n \"\"\"\n payload = super(LifecycleEnvironment, self).create_payload()\n if (_get_version(self._server_config) < Version('6.1') and\n 'prior_id' in payload):\n payload['prior'] = payload.pop('prior_id')\n return payload",
"def _check_agent_id_bulk(self, payload):\n \"\"\"Checks that all the given agent ids are valid non-empty strings\n and if the agents are serializable.\n\n :param list payload: list of dictionnary which represents an agent.\n\n :return: list of the agents with valid ids, list of the agents with\n invalid ids, list of the dictionnaries with valid ids.\n :rtype: list, list, list of dict.\n\n :raise CraftAiBadRequestError: If all the agents are invalid.\n \"\"\"\n invalid_agent_indices = []\n valid_agent_indices = []\n invalid_payload = []\n for index, agent in enumerate(payload):\n # Check if the agent ID is valid\n try:\n if \"id\" in agent:\n self._check_agent_id(agent[\"id\"])\n except CraftAiBadRequestError:\n invalid_agent_indices.append(index)\n invalid_payload.append({\"id\": agent[\"id\"],\n \"error\": CraftAiBadRequestError(ERROR_ID_MESSAGE)})\n else:\n # Check if the agent is serializable\n try:\n json.dumps([agent])\n except TypeError as err:\n invalid_agent_indices.append(index)\n invalid_payload.append({\"id\": agent[\"id\"],\n \"error\": err})\n else:\n valid_agent_indices.append(index)\n\n if len(invalid_agent_indices) == len(payload):\n raise CraftAiBadRequestError(ERROR_ID_MESSAGE)\n\n return valid_agent_indices, invalid_agent_indices, invalid_payload",
"def get_instance(self, payload):\n \"\"\"\n Build an instance of KeyInstance\n\n :param dict payload: Payload response from the API\n\n :returns: twilio.rest.preview.deployed_devices.fleet.key.KeyInstance\n :rtype: twilio.rest.preview.deployed_devices.fleet.key.KeyInstance\n \"\"\"\n return KeyInstance(self._version, payload, fleet_sid=self._solution['fleet_sid'], )",
"public CMAAsset create(String spaceId, String environmentId, CMAAsset asset) {\n assertNotNull(spaceId, \"spaceId\");\n assertNotNull(environmentId, \"environmentId\");\n assertNotNull(asset, \"asset\");\n\n final String assetId = asset.getId();\n\n final CMASystem sys = asset.getSystem();\n asset.setSystem(null);\n\n try {\n if (assetId == null) {\n return service.create(spaceId, environmentId, asset).blockingFirst();\n } else {\n return service.create(spaceId, environmentId, assetId, asset).blockingFirst();\n }\n } finally {\n asset.setSystem(sys);\n }\n }",
"def get_instance(self, payload):\n \"\"\"\n Build an instance of DeviceInstance\n\n :param dict payload: Payload response from the API\n\n :returns: twilio.rest.preview.deployed_devices.fleet.device.DeviceInstance\n :rtype: twilio.rest.preview.deployed_devices.fleet.device.DeviceInstance\n \"\"\"\n return DeviceInstance(self._version, payload, fleet_sid=self._solution['fleet_sid'], )",
"def create(gandi, name, size, type, quantity, duration, datacenter, vhosts,\n password, snapshotprofile, background, sshkey, ssl, private_key,\n poll_cert):\n \"\"\"Create a new PaaS instance and initialize associated git repository.\n\n you can specify a configuration entry named 'sshkey' containing\n path to your sshkey file\n\n $ gandi config set [-g] sshkey ~/.ssh/id_rsa.pub\n\n or getting the sshkey \"my_key\" from your gandi ssh keyring\n\n $ gandi config set [-g] sshkey my_key\n\n to know which PaaS instance type to use as type\n\n $ gandi paas types\n\n \"\"\"\n try:\n gandi.datacenter.is_opened(datacenter, 'paas')\n except DatacenterLimited as exc:\n gandi.echo('/!\\ Datacenter %s will be closed on %s, '\n 'please consider using another datacenter.' %\n (datacenter, exc.date))\n\n if not password:\n password = click.prompt('password', hide_input=True,\n confirmation_prompt=True)\n\n if not name:\n name = randomstring('paas')\n\n if vhosts and not gandi.hostedcert.activate_ssl(vhosts,\n ssl,\n private_key,\n poll_cert):\n return\n\n result = gandi.paas.create(name, size, type, quantity, duration,\n datacenter, vhosts, password,\n snapshotprofile, background, sshkey)\n return result"
] |
[
0.751899242401123,
0.7515250444412231,
0.7219224572181702,
0.6788215637207031,
0.6209136843681335,
0.6128567457199097,
0.6048723459243774,
0.60396808385849,
0.599178671836853,
0.59865802526474,
0.5976240634918213,
0.5968005657196045
] |
Get output (as a string) from pip command
    :param pip_args: list of pip switches to pass
:return: string with results
|
def pip_command_output(pip_args):
"""
Get output (as a string) from pip command
    :param pip_args: list of pip switches to pass
:return: string with results
"""
import sys
import pip
from io import StringIO
# as pip will write to stdout we use some nasty hacks
# to substitute system stdout with our own
old_stdout = sys.stdout
sys.stdout = mystdout = StringIO()
pip.main(pip_args)
output = mystdout.getvalue()
mystdout.truncate(0)
sys.stdout = old_stdout
return output
|
[
"def run_pip_command(command_args, pip_version=None, python_version=None):\n \"\"\"Run a pip command.\n\n Args:\n command_args (list of str): Args to pip.\n\n Returns:\n `subprocess.Popen`: Pip process.\n \"\"\"\n pip_exe, context = find_pip(pip_version, python_version)\n command = [pip_exe] + list(command_args)\n\n if context is None:\n return popen(command)\n else:\n return context.execute_shell(command=command, block=False)",
"def get_output(cmd, args):\n \"\"\"Runs a command and returns its output (stdout + stderr).\n\n :param str|unicode cmd:\n :param str|unicode|list[str|unicode] args:\n\n :rtype: str|unicode\n\n \"\"\"\n from subprocess import Popen, STDOUT, PIPE\n\n command = [cmd]\n command.extend(listify(args))\n\n process = Popen(command, stdout=PIPE, stderr=STDOUT)\n out, _ = process.communicate()\n\n return out.decode('utf-8')",
"def _execute_pip(self, args, log=True):\n \"\"\"\n Executes pip commands.\n\n :param args: Arguments to pass to pip (list[str])\n :param log: Log the output to a file [default: True] (boolean)\n :return: See _execute\n \"\"\"\n\n # Copy the pip calling arguments so they can be extended\n exec_args = list(self._pip)\n\n # Older versions of pip don't support the version check argument.\n # Fixes https://github.com/sjkingo/virtualenv-api/issues/35\n if self.pip_version[0] >= 6:\n exec_args.append('--disable-pip-version-check')\n\n exec_args.extend(args)\n return self._execute(exec_args, log=log)",
"def pip_report(self):\n \"\"\"\n Show editable pip-requirements line necessary to clone this repository\n\n Yields:\n str: pip-requirements line necessary to clone this repository\n \"\"\"\n comment = '#' if not self.remote_url else ''\n if os.path.exists(os.path.join(self.fpath, 'setup.py')):\n yield u\"%s-e %s+%s@%s#egg=%s\" % (\n comment,\n self.label,\n self.to_normal_url(self.remote_url),\n self.current_id,\n self.eggname)\n return",
"def get_pipe_series_output(commands: Sequence[str],\n stdinput: BinaryIO = None) -> bytes:\n \"\"\"\n Get the output from a piped series of commands.\n\n Args:\n commands: sequence of command strings\n stdinput: optional ``stdin`` data to feed into the start of the pipe\n\n Returns:\n ``stdout`` from the end of the pipe\n\n \"\"\"\n # Python arrays indexes are zero-based, i.e. an array is indexed from\n # 0 to len(array)-1.\n # The range/xrange commands, by default, start at 0 and go to one less\n # than the maximum specified.\n\n # print commands\n processes = [] # type: List[subprocess.Popen]\n for i in range(len(commands)):\n if i == 0: # first processes\n processes.append(\n subprocess.Popen(\n shlex.split(commands[i]),\n stdin=subprocess.PIPE,\n stdout=subprocess.PIPE\n )\n )\n else: # subsequent ones\n processes.append(\n subprocess.Popen(\n shlex.split(commands[i]),\n stdin=processes[i - 1].stdout,\n stdout=subprocess.PIPE\n )\n )\n return processes[len(processes) - 1].communicate(stdinput)[0]",
"def pip(args):\n \"\"\"Run pip, in-process.\"\"\"\n from sys import stdout\n stdout.write(colorize(('pip',) + args))\n stdout.write('\\n')\n stdout.flush()\n\n return pipmodule._internal.main(list(args))",
"def pip_command(self, command, *args):\n \"\"\"\n Runs a pip command\n \"\"\"\n try:\n from pip._internal import main as pip_main\n except ImportError:\n from pip import main as pip_main\n\n args = [command] + list(args)\n if self.verbosity == 0:\n args.insert(0, '--quiet')\n elif self.verbosity == 2:\n args.insert(0, '--verbose')\n return pip_main(args)",
"def get_output_from_pipe(self, input_file):\n \"\"\"Executes an external command and get its output. The command\n receives its input_file from the stdin through a pipe\n \n :param input_file: input file\n :return: output of command\n \"\"\"\n args = shlex.split(self.cmd)\n p = Popen(args, stdout=PIPE, stdin=PIPE) # | grep es\n p.stdin.write(bytearray(input_file.encode(\"utf8\"))) # echo test |\n return p.communicate()[0].decode(\"utf8\")",
"def get_pip_options(args=[], sources=None, pip_command=None):\n \"\"\"Build a pip command from a list of sources\n\n :param args: positional arguments passed through to the pip parser\n :param sources: A list of pipfile-formatted sources, defaults to None\n :param sources: list[dict], optional\n :param pip_command: A pre-built pip command instance\n :type pip_command: :class:`~pip._internal.cli.base_command.Command`\n :return: An instance of pip_options using the supplied arguments plus sane defaults\n :rtype: :class:`~pip._internal.cli.cmdoptions`\n \"\"\"\n\n if not pip_command:\n pip_command = get_pip_command()\n if not sources:\n sources = [\n {\"url\": \"https://pypi.org/simple\", \"name\": \"pypi\", \"verify_ssl\": True}\n ]\n _ensure_dir(CACHE_DIR)\n pip_args = args\n pip_args = prepare_pip_source_args(sources, pip_args)\n pip_options, _ = pip_command.parser.parse_args(pip_args)\n pip_options.cache_dir = CACHE_DIR\n return pip_options",
"def CheckOutput(*popenargs, **kwargs):\n \"\"\"\n Run command with arguments and return its output as a byte string.\n Backported from Python 2.7 as it's implemented as pure python on stdlib.\n \"\"\"\n process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)\n output, _ = process.communicate()\n retcode = process.poll()\n if retcode:\n cmd = kwargs.get(\"args\")\n if cmd is None:\n cmd = popenargs[0]\n error = subprocess.CalledProcessError(retcode, cmd)\n error.output = output\n raise error\n return retcode, output",
"def run(args: List[str],\n get_output: bool = False,\n encoding: str = sys.getdefaultencoding()) -> Tuple[str, str]:\n \"\"\"\n Run an external command +/- return the results.\n Returns a ``(stdout, stderr)`` tuple (both are blank strings if the output\n wasn't wanted).\n \"\"\"\n printable = \" \".join(shlex.quote(x) for x in args).replace(\"\\n\", r\"\\n\")\n log.debug(\"Running external command: {}\", printable)\n if get_output:\n p = subprocess.run(args, stdout=subprocess.PIPE,\n stderr=subprocess.PIPE, check=True)\n stdout, stderr = p.stdout.decode(encoding), p.stderr.decode(encoding)\n else:\n subprocess.check_call(args)\n stdout, stderr = \"\", \"\"\n return stdout, stderr",
"def main(args=None):\n \"\"\"Entry point for pypyr cli.\n\n The setup_py entry_point wraps this in sys.exit already so this effectively\n becomes sys.exit(main()).\n The __main__ entry point similarly wraps sys.exit().\n \"\"\"\n if args is None:\n args = sys.argv[1:]\n\n parsed_args = get_args(args)\n\n try:\n return pypyr.pipelinerunner.main(\n pipeline_name=parsed_args.pipeline_name,\n pipeline_context_input=parsed_args.pipeline_context,\n working_dir=parsed_args.working_dir,\n log_level=parsed_args.log_level,\n log_path=parsed_args.log_path)\n except KeyboardInterrupt:\n # Shell standard is 128 + signum = 130 (SIGINT = 2)\n sys.stdout.write(\"\\n\")\n return 128 + signal.SIGINT\n except Exception as e:\n # stderr and exit code 255\n sys.stderr.write(\"\\n\")\n sys.stderr.write(f\"\\033[91m{type(e).__name__}: {str(e)}\\033[0;0m\")\n sys.stderr.write(\"\\n\")\n # at this point, you're guaranteed to have args and thus log_level\n if parsed_args.log_level < 10:\n # traceback prints to stderr by default\n traceback.print_exc()\n\n return 255"
] |
[
0.7169312238693237,
0.7122620940208435,
0.7016721963882446,
0.6971988677978516,
0.6969940066337585,
0.6961489319801331,
0.6926828622817993,
0.6923568248748779,
0.678249180316925,
0.672666072845459,
0.6705126166343689,
0.6704131960868835
] |
Generate (temporarily) versioneer.py file in project root directory
:return:
|
def setup_versioneer():
"""
Generate (temporarily) versioneer.py file in project root directory
:return:
"""
try:
# assume versioneer.py was generated using "versioneer install" command
import versioneer
versioneer.get_version()
except ImportError:
        # it looks like versioneer.py is missing
        # let's assume that the versioneer package is installed
# and versioneer binary is present in $PATH
import subprocess
try:
# call versioneer install to generate versioneer.py
subprocess.check_output(["versioneer", "install"])
except OSError:
            # it looks like versioneer is missing from $PATH
# probably versioneer is installed in some user directory
# query pip for list of files in versioneer package
            # line below is equivalent to putting the result of the
            # "pip show -f versioneer" command into the string output
output = pip_command_output(["show", "-f", "versioneer"])
# now we parse the results
import os
# find absolute path where *versioneer package* was installed
# and store it in main_path
main_path = [x[len("Location: "):] for x in output.splitlines()
if x.startswith("Location")][0]
# find path relative to main_path where
# *versioneer binary* was installed
bin_path = [x[len(" "):] for x in output.splitlines()
if x.endswith(os.path.sep + "versioneer")][0]
# exe_path is absolute path to *versioneer binary*
exe_path = os.path.join(main_path, bin_path)
# call versioneer install to generate versioneer.py
# line below is equivalent to running in terminal
# "python versioneer install"
subprocess.check_output(["python", exe_path, "install"])
|
[
"def get_root():\n \"\"\"Get the project root directory.\n\n We require that all commands are run from the project root, i.e. the\n directory that contains setup.py, setup.cfg, and versioneer.py .\n \"\"\"\n root = os.path.realpath(os.path.abspath(os.getcwd()))\n setup_py = os.path.join(root, \"setup.py\")\n versioneer_py = os.path.join(root, \"versioneer.py\")\n if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):\n # allow 'python path/to/setup.py COMMAND'\n root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))\n setup_py = os.path.join(root, \"setup.py\")\n versioneer_py = os.path.join(root, \"versioneer.py\")\n if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):\n err = (\"Versioneer was unable to run the project root directory. \"\n \"Versioneer requires setup.py to be executed from \"\n \"its immediate directory (like 'python setup.py COMMAND'), \"\n \"or in a way that lets it use sys.argv[0] to find the root \"\n \"(like 'python path/to/setup.py COMMAND').\")\n raise VersioneerBadRootError(err)\n try:\n # Certain runtime workflows (setup.py install/develop in a setuptools\n # tree) execute all dependencies in a single python process, so\n # \"versioneer\" may be imported multiple times, and python's shared\n # module-import table will cache the first one. So we can't use\n # os.path.dirname(__file__), as that will find whichever\n # versioneer.py was first imported, even in later projects.\n me = os.path.realpath(os.path.abspath(__file__))\n if os.path.splitext(me)[0] != os.path.splitext(versioneer_py)[0]:\n print(\"Warning: build in %s is using versioneer.py from %s\"\n % (os.path.dirname(me), versioneer_py))\n except NameError:\n pass\n return root",
"def run(self):\n \"\"\"Create the versioneer.py file.\"\"\"\n\n print(\" creating %s\" % versionfile_source)\n with open(versionfile_source, \"w\") as f:\n f.write(get_vcs_code())\n\n ipy = os.path.join(os.path.dirname(versionfile_source), \"__init__.py\")\n try:\n with open(ipy, \"r\") as f:\n old = f.read()\n except EnvironmentError:\n old = \"\"\n if INIT_PY_SNIPPET not in old:\n print(\" appending to %s\" % ipy)\n with open(ipy, \"a\") as f:\n f.write(INIT_PY_SNIPPET)\n else:\n print(\" %s unmodified\" % ipy)\n\n # Make sure both the top-level \"versioneer.py\" and versionfile_source\n # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so\n # they'll be copied into source distributions. Pip won't be able to\n # install the package without this.\n manifest_in = os.path.join(get_root(), \"MANIFEST.in\")\n simple_includes = set()\n try:\n with open(manifest_in, \"r\") as f:\n for line in f:\n if line.startswith(\"include \"):\n for include in line.split()[1:]:\n simple_includes.add(include)\n except EnvironmentError:\n pass\n # That doesn't cover everything MANIFEST.in can do\n # (http://docs.python.org/2/distutils/sourcedist.html#commands), so\n # it might give some false negatives. Appending redundant 'include'\n # lines is safe, though.\n if \"versioneer.py\" not in simple_includes:\n print(\" appending 'versioneer.py' to MANIFEST.in\")\n with open(manifest_in, \"a\") as f:\n f.write(\"include versioneer.py\\n\")\n else:\n print(\" 'versioneer.py' already in MANIFEST.in\")\n if versionfile_source not in simple_includes:\n print(\" appending versionfile_source ('%s') to MANIFEST.in\" %\n versionfile_source)\n with open(manifest_in, \"a\") as f:\n f.write(\"include %s\\n\" % versionfile_source)\n else:\n print(\" versionfile_source already in MANIFEST.in\")\n\n # Make VCS-specific changes. For git, this means creating/changing\n # .gitattributes to mark _version.py for export-time keyword\n # substitution.\n \n do_vcs_install_f = getattr(sys.modules[__name__], VCS + '_do_vcs_install')\n do_vcs_install_f(manifest_in, versionfile_source, ipy)",
"def gen_version(do_write=True, txt=None):\n \"\"\"\n Generate a version based on git tag info. This will write the\n couchbase/_version.py file. If not inside a git tree it will\n raise a CantInvokeGit exception - which is normal\n (and squashed by setup.py) if we are running from a tarball\n \"\"\"\n\n if txt is None:\n txt = get_git_describe()\n\n try:\n info = VersionInfo(txt)\n vstr = info.package_version\n except MalformedGitTag:\n warnings.warn(\"Malformed input '{0}'\".format(txt))\n vstr = '0.0.0'+txt\n\n if not do_write:\n print(vstr)\n return\n\n lines = (\n '# This file automatically generated by',\n '# {0}'.format(__file__),\n '# at',\n '# {0}'.format(datetime.datetime.now().isoformat(' ')),\n \"__version__ = '{0}'\".format(vstr)\n )\n with open(VERSION_FILE, \"w\") as fp:\n fp.write(\"\\n\".join(lines))",
"def generate_version_py(packagename=None, version=None, release=None, debug=None,\n uses_git=None, srcdir='.'):\n \"\"\"\n Generate a version.py file in the package with version information, and\n update developer version strings.\n\n This function should normally be called without any arguments. In this case\n the package name and version is read in from the ``setup.cfg`` file (from\n the ``name`` or ``package_name`` entry and the ``version`` entry in the\n ``[metadata]`` section).\n\n If the version is a developer version (of the form ``3.2.dev``), the\n version string will automatically be expanded to include a sequential\n number as a suffix (e.g. ``3.2.dev13312``), and the updated version string\n will be returned by this function.\n\n Based on this updated version string, a ``version.py`` file will be\n generated inside the package, containing the version string as well as more\n detailed information (for example the major, minor, and bugfix version\n numbers, a ``release`` flag indicating whether the current version is a\n stable or developer version, and so on.\n \"\"\"\n\n if packagename is not None:\n warnings.warn('The packagename argument to generate_version_py has '\n 'been deprecated and will be removed in future. Specify '\n 'the package name in setup.cfg instead', AstropyDeprecationWarning)\n\n if version is not None:\n warnings.warn('The version argument to generate_version_py has '\n 'been deprecated and will be removed in future. Specify '\n 'the version number in setup.cfg instead', AstropyDeprecationWarning)\n\n if release is not None:\n warnings.warn('The release argument to generate_version_py has '\n 'been deprecated and will be removed in future. We now '\n 'use the presence of the \"dev\" string in the version to '\n 'determine whether this is a release', AstropyDeprecationWarning)\n\n # We use ConfigParser instead of read_configuration here because the latter\n # only reads in keys recognized by setuptools, but we need to access\n # package_name below.\n conf = ConfigParser()\n conf.read('setup.cfg')\n\n if conf.has_option('metadata', 'name'):\n packagename = conf.get('metadata', 'name')\n elif conf.has_option('metadata', 'package_name'):\n # The package-template used package_name instead of name for a while\n warnings.warn('Specifying the package name using the \"package_name\" '\n 'option in setup.cfg is deprecated - use the \"name\" '\n 'option instead.', AstropyDeprecationWarning)\n packagename = conf.get('metadata', 'package_name')\n elif packagename is not None: # deprecated\n pass\n else:\n sys.stderr.write('ERROR: Could not read package name from setup.cfg\\n')\n sys.exit(1)\n\n if conf.has_option('metadata', 'version'):\n version = conf.get('metadata', 'version')\n add_git_devstr = True\n elif version is not None: # deprecated\n add_git_devstr = False\n else:\n sys.stderr.write('ERROR: Could not read package version from setup.cfg\\n')\n sys.exit(1)\n\n if release is None:\n release = 'dev' not in version\n\n if not release and add_git_devstr:\n version += get_git_devstr(False)\n\n if uses_git is None:\n uses_git = not release\n\n # In some cases, packages have a - but this is a _ in the module. 
Since we\n # are only interested in the module here, we replace - by _\n packagename = packagename.replace('-', '_')\n\n try:\n version_module = get_pkg_version_module(packagename)\n\n try:\n last_generated_version = version_module._last_generated_version\n except AttributeError:\n last_generated_version = version_module.version\n\n try:\n last_githash = version_module._last_githash\n except AttributeError:\n last_githash = version_module.githash\n\n current_release = version_module.release\n current_debug = version_module.debug\n except ImportError:\n version_module = None\n last_generated_version = None\n last_githash = None\n current_release = None\n current_debug = None\n\n if release is None:\n # Keep whatever the current value is, if it exists\n release = bool(current_release)\n\n if debug is None:\n # Likewise, keep whatever the current value is, if it exists\n debug = bool(current_debug)\n\n package_srcdir = os.path.join(srcdir, *packagename.split('.'))\n version_py = os.path.join(package_srcdir, 'version.py')\n\n if (last_generated_version != version or current_release != release or\n current_debug != debug):\n if '-q' not in sys.argv and '--quiet' not in sys.argv:\n log.set_threshold(log.INFO)\n\n if is_distutils_display_option():\n # Always silence unnecessary log messages when display options are\n # being used\n log.set_threshold(log.WARN)\n\n log.info('Freezing version number to {0}'.format(version_py))\n\n with open(version_py, 'w') as f:\n # This overwrites the actual version.py\n f.write(_get_version_py_str(packagename, version, last_githash,\n release, debug, uses_git=uses_git))\n\n return version",
"def generateRevision(self):\n \"\"\"\n Generates the revision file for this builder.\n \"\"\"\n revpath = self.sourcePath()\n if not os.path.exists(revpath):\n return\n\n # determine the revision location\n revfile = os.path.join(revpath, self.revisionFilename())\n mode = ''\n # test for svn revision\n try:\n args = ['svn', 'info', revpath]\n proc = subprocess.Popen(args, stdout=subprocess.PIPE)\n mode = 'svn'\n except WindowsError:\n try:\n args = ['git', 'rev-parse', 'HEAD', revpath]\n proc = subprocess.Popen(args, stdout=subprocess.PIPE)\n mode = 'git'\n except WindowsError:\n return\n\n # process SVN revision\n rev = None\n\n if mode == 'svn':\n for line in proc.stdout:\n data = re.match('^Revision: (\\d+)', line)\n if data:\n rev = int(data.group(1))\n break\n\n if rev is not None:\n try:\n f = open(revfile, 'w')\n f.write('__revision__ = {0}\\n'.format(rev))\n f.close()\n except IOError:\n pass",
"def generate_version_file(infile='version.py.in', outfile='version.py', **kwargs):\n \"\"\"\n Using input file, generate an output file with the version info for this package.\n This should generally be used in setup.py to generate build stamp info.\n :param infile:\n :param outfile:\n :param kwargs:\n :return:\n \"\"\"\n\n if not os.path.exists(outfile) and os.path.exists(infile):\n now = datetime.datetime.now()\n repo_hash = ''\n git_path = find_exe('git')\n if git_path is not None:\n git_cmd = [find_exe('git'), 'log', '--format=%h', '-1']\n repo_hash = subprocess.Popen(git_cmd, stdout=subprocess.PIPE).communicate()[0].strip()\n if repo_hash.strip() == '':\n repo_hash = '{now.hour}.{now.minute}.{now.second}'.format(now=now)\n\n sub_dict = {\n 'repo_hash': repo_hash,\n 'build_date': '{now.year}.{now.month}.{now.day}'.format(now=now),\n 'build_time': '{now.hour}.{now.minute}.{now.second}'.format(now=now),\n 'build_host': socket.gethostname(),\n 'build_user': getpass.getuser()\n }\n sub_dict.update(**kwargs)\n\n with open(infile) as version_in:\n version_data = version_in.read()\n version_final = string.Template(version_data).safe_substitute(sub_dict)\n\n with open(outfile, 'w') as version_out:\n version_out.write(version_final)\n\n return outfile if os.path.exists(outfile) else None",
"def _generate_version(base_version):\n \"\"\"Generate a version with information about the git repository\"\"\"\n pkg_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))\n\n if not _is_git_repo(pkg_dir) or not _have_git():\n return base_version\n\n if _is_release(pkg_dir, base_version) and not _is_dirty(pkg_dir):\n return base_version\n\n return \"{base_version}+{short_sha}{dirty}\".format(\n base_version=base_version,\n short_sha=_git_revision(pkg_dir).decode(\"utf-8\")[0:6],\n dirty=\".mod\" if _is_dirty(pkg_dir) else \"\",\n )",
"def bump_minor_version():\n \"\"\"Bump the minor version in version.py.\"\"\"\n version = load_version_as_list()\n print('current version: {}'.format(format_version_string(version)))\n version[-1] += 1\n print('new version: {}'.format(format_version_string(version)))\n\n contents = \"__version__ = '{}'\\n\".format(format_version_string(version))\n\n with open(VERSION_PATH, 'w') as wfile:\n wfile.write(contents)",
"def embed_version(basepath, ref='v0.2.2'):\n \"\"\"\n Autover is purely a build time dependency in all cases (conda and\n pip) except for when you use pip's remote git support [git+url] as\n 1) you need a dynamically changing version and 2) the environment\n starts off clean with zero dependencies installed.\n This function acts as a fallback to make Version available until\n PEP518 is commonly supported by pip to express build dependencies.\n \"\"\"\n import io, zipfile, importlib\n try: from urllib.request import urlopen\n except: from urllib import urlopen\n try:\n url = 'https://github.com/ioam/autover/archive/{ref}.zip'\n response = urlopen(url.format(ref=ref))\n zf = zipfile.ZipFile(io.BytesIO(response.read()))\n ref = ref[1:] if ref.startswith('v') else ref\n embed_version = zf.read('autover-{ref}/autover/version.py'.format(ref=ref))\n with open(os.path.join(basepath, 'version.py'), 'wb') as f:\n f.write(embed_version)\n return importlib.import_module(\"version\")\n except:\n return None",
"def set_version(mod_root):\n \"\"\"\n mod_root\n a VERSION file containes the version strings is created in mod_root,\n during installation. That file is used at runtime to get the version\n information.\n \"\"\"\n\n try:\n\n version_base = None\n version_detail = None\n\n # get version from './VERSION'\n src_root = os.path.dirname(__file__)\n if not src_root:\n src_root = '.'\n\n with open(src_root + '/VERSION', 'r') as f:\n version_base = f.readline().strip()\n\n # attempt to get version detail information from git\n # We only do that though if we are in a repo root dir,\n # ie. if 'git rev-parse --show-prefix' returns an empty string --\n # otherwise we get confused if the ve lives beneath another repository,\n # and the pip version used uses an install tmp dir in the ve space\n # instead of /tmp (which seems to happen with some pip/setuptools\n # versions).\n p = sp.Popen('cd %s ; '\n 'test -z `git rev-parse --show-prefix` || exit -1; '\n 'tag=`git describe --tags --always` 2>/dev/null ; '\n 'branch=`git branch | grep -e \"^*\" | cut -f 2- -d \" \"` 2>/dev/null ; '\n 'echo $tag@$branch' % src_root,\n stdout=sp.PIPE, stderr=sp.STDOUT, shell=True)\n version_detail = str(p.communicate()[0].strip())\n version_detail = version_detail.replace('detached from ', 'detached-')\n\n # remove all non-alphanumeric (and then some) chars\n version_detail = re.sub('[/ ]+', '-', version_detail)\n version_detail = re.sub('[^a-zA-Z0-9_+@.-]+', '', version_detail)\n\n if p.returncode != 0 or \\\n version_detail == '@' or \\\n 'git-error' in version_detail or \\\n 'not-a-git-repo' in version_detail or \\\n 'not-found' in version_detail or \\\n 'fatal' in version_detail :\n version = version_base\n elif '@' not in version_base:\n version = '%s-%s' % (version_base, version_detail)\n else:\n version = version_base\n\n # make sure the version files exist for the runtime version inspection\n path = '%s/%s' % (src_root, mod_root)\n with open(path + \"/VERSION\", \"w\") as f:\n f.write(version + \"\\n\")\n\n sdist_name = \"%s-%s.tar.gz\" % (name, version)\n sdist_name = sdist_name.replace('/', '-')\n sdist_name = sdist_name.replace('@', '-')\n sdist_name = sdist_name.replace('#', '-')\n sdist_name = sdist_name.replace('_', '-')\n\n if '--record' in sys.argv or \\\n 'bdist_egg' in sys.argv or \\\n 'bdist_wheel' in sys.argv :\n # pip install stage 2 or easy_install stage 1\n #\n # pip install will untar the sdist in a tmp tree. In that tmp\n # tree, we won't be able to derive git version tags -- so we pack the\n # formerly derived version as ./VERSION\n shutil.move(\"VERSION\", \"VERSION.bak\") # backup version\n shutil.copy(\"%s/VERSION\" % path, \"VERSION\") # use full version instead\n os.system (\"python setup.py sdist\") # build sdist\n shutil.copy('dist/%s' % sdist_name,\n '%s/%s' % (mod_root, sdist_name)) # copy into tree\n shutil.move(\"VERSION.bak\", \"VERSION\") # restore version\n\n with open(path + \"/SDIST\", \"w\") as f:\n f.write(sdist_name + \"\\n\")\n\n return version_base, version_detail, sdist_name\n\n except Exception as e :\n raise RuntimeError('Could not extract/set version: %s' % e)",
"def get_versions(verbose=False):\n \"\"\"Get the project version from whatever source is available.\n\n Returns dict with two keys: 'version' and 'full'.\n \"\"\"\n if \"versioneer\" in sys.modules:\n # see the discussion in cmdclass.py:get_cmdclass()\n del sys.modules[\"versioneer\"]\n\n root = get_root()\n cfg = get_config_from_root(root)\n\n assert cfg.VCS is not None, \"please set [versioneer]VCS= in setup.cfg\"\n handlers = HANDLERS.get(cfg.VCS)\n assert handlers, \"unrecognized VCS '%s'\" % cfg.VCS\n verbose = verbose or cfg.verbose\n assert cfg.versionfile_source is not None, \\\n \"please set versioneer.versionfile_source\"\n assert cfg.tag_prefix is not None, \"please set versioneer.tag_prefix\"\n\n versionfile_abs = os.path.join(root, cfg.versionfile_source)\n\n # extract version from first of: _version.py, VCS command (e.g. 'git\n # describe'), parentdir. This is meant to work for developers using a\n # source checkout, for users of a tarball created by 'setup.py sdist',\n # and for users of a tarball/zipball created by 'git archive' or github's\n # download-from-tag feature or the equivalent in other VCSes.\n\n get_keywords_f = handlers.get(\"get_keywords\")\n from_keywords_f = handlers.get(\"keywords\")\n if get_keywords_f and from_keywords_f:\n try:\n keywords = get_keywords_f(versionfile_abs)\n ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)\n if verbose:\n print(\"got version from expanded keyword %s\" % ver)\n return ver\n except NotThisMethod:\n pass\n\n try:\n ver = versions_from_file(versionfile_abs)\n if verbose:\n print(\"got version from file %s %s\" % (versionfile_abs, ver))\n return ver\n except NotThisMethod:\n pass\n\n from_vcs_f = handlers.get(\"pieces_from_vcs\")\n if from_vcs_f:\n try:\n pieces = from_vcs_f(cfg.tag_prefix, root, verbose)\n ver = render(pieces, cfg.style)\n if verbose:\n print(\"got version from VCS %s\" % ver)\n return ver\n except NotThisMethod:\n pass\n\n try:\n if cfg.parentdir_prefix:\n ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)\n if verbose:\n print(\"got version from parentdir %s\" % ver)\n return ver\n except NotThisMethod:\n pass\n\n if verbose:\n print(\"unable to compute version\")\n\n return {\"version\": \"0+unknown\", \"full-revisionid\": None,\n \"dirty\": None, \"error\": \"unable to compute version\"}",
"def get_versions(default=DEFAULT, verbose=False):\n \"\"\"This variation of get_versions() will be used in versioneer.py .\"\"\"\n\n\n # returns dict with two keys: 'version' and 'full'\n assert versionfile_source is not None, \"please set versioneer.versionfile_source\"\n assert tag_prefix is not None, \"please set versioneer.tag_prefix\"\n assert parentdir_prefix is not None, \"please set versioneer.parentdir_prefix\"\n assert VCS is not None, \"please set versioneer.VCS\"\n\n # I am in versioneer.py, which must live at the top of the source tree,\n # which we use to compute the root directory. py2exe/bbfreeze/non-CPython\n # don't have __file__, in which case we fall back to sys.argv[0] (which\n # ought to be the setup.py script). We prefer __file__ since that's more\n # robust in cases where setup.py was invoked in some weird way (e.g. pip)\n root = get_root()\n versionfile_abs = os.path.join(root, versionfile_source)\n\n # extract version from first of _version.py, VCS command (e.g. 'git\n # describe'), parentdir. This is meant to work for developers using a\n # source checkout, for users of a tarball created by 'setup.py sdist',\n # and for users of a tarball/zipball created by 'git archive' or github's\n # download-from-tag feature or the equivalent in other VCSes.\n\n # Try to get the version info from the VCS-specific replacement keywords.\n\n get_keywords_f = vcs_function(VCS, \"get_keywords\")\n versions_from_keywords_f = vcs_function(VCS, \"versions_from_keywords\")\n if get_keywords_f and versions_from_keywords_f:\n vcs_keywords = get_keywords_f(versionfile_abs)\n ver = versions_from_keywords_f(vcs_keywords, tag_prefix)\n if ver:\n if verbose: print(\"got version from expanded keyword %s\" % ver)\n return ver\n\n # Try to get the version info from _version.py .\n\n ver = versions_from_file(versionfile_abs)\n if ver:\n if verbose: print(\"got version from file %s %s\" % (versionfile_abs,ver))\n return ver\n\n # Try to get the version info from the VCS, directly.\n\n versions_from_vcs_f = vcs_function(VCS, \"versions_from_vcs\")\n if versions_from_vcs_f:\n ver = versions_from_vcs_f(tag_prefix, root, verbose)\n if ver:\n if verbose: print(\"got version from VCS %s\" % ver)\n return ver\n\n # Try to get the version info from the directory's naming.\n\n ver = versions_from_parentdir(parentdir_prefix, root, verbose)\n if ver:\n if verbose: print(\"got version from parentdir %s\" % ver)\n return ver\n\n if verbose: print(\"got version from default %s\" % default)\n return default"
] |
[
0.7725598216056824,
0.7593209147453308,
0.7382193803787231,
0.7354882955551147,
0.7267719507217407,
0.721881091594696,
0.7006163597106934,
0.6965499520301819,
0.6929922103881836,
0.6898463368415833,
0.6882026791572571,
0.6879287362098694
] |
    Python won't realise that a new module has appeared in the runtime
We need to clean the cache of module finders. Hacking again
:return:
|
def clean_cache():
"""
    Python won't realise that a new module has appeared in the runtime
We need to clean the cache of module finders. Hacking again
:return:
"""
import importlib
try: # Python ver < 3.3
vermod = importlib.import_module("versioneer")
globals()["versioneer"] = vermod
except ImportError:
importlib.invalidate_caches()
|
[
"def cache_py2_modules():\n \"\"\"\n Currently this function is unneeded, as we are not attempting to provide import hooks\n for modules with ambiguous names: email, urllib, pickle.\n \"\"\"\n if len(sys.py2_modules) != 0:\n return\n assert not detect_hooks()\n import urllib\n sys.py2_modules['urllib'] = urllib\n\n import email\n sys.py2_modules['email'] = email\n\n import pickle\n sys.py2_modules['pickle'] = pickle",
"def module(self):\n '''Python module to run the job.\n\n This is used by :func:`run` and the standard worker system.\n If the work spec contains keys ``module``, ``run_function``,\n and ``terminate_function``, then this contains the Python\n module object named as ``module``; otherwise this contains\n :const:`None`.\n\n '''\n if self._module_cache is None:\n funclist = filter(None, (self.spec.get('run_function'),\n self.spec.get('terminate_function')))\n if funclist:\n try:\n self._module_cache = __import__(\n self.spec['module'], globals(), (), funclist, -1)\n except Exception:\n logger.error('failed to load spec[\"module\"] = %r',\n self.spec['module'], exc_info=True)\n raise\n return self._module_cache",
"def __build_python_module_cache(self):\n \"\"\"Recursively walks through the b2/src subdirectories and\n creates an index of base module name to package name. The\n index is stored within self.__python_module_cache and allows\n for an O(1) module lookup.\n\n For example, given the base module name `toolset`,\n self.__python_module_cache['toolset'] will return\n 'b2.build.toolset'\n\n pkgutil.walk_packages() will find any python package\n provided a directory contains an __init__.py. This has the\n added benefit of allowing libraries to be installed and\n automatically avaiable within the contrib directory.\n\n *Note*: pkgutil.walk_packages() will import any subpackage\n in order to access its __path__variable. Meaning:\n any initialization code will be run if the package hasn't\n already been imported.\n \"\"\"\n cache = {}\n for importer, mname, ispkg in pkgutil.walk_packages(b2.__path__, prefix='b2.'):\n basename = mname.split('.')[-1]\n # since the jam code is only going to have \"import toolset ;\"\n # it doesn't matter if there are separately named \"b2.build.toolset\" and\n # \"b2.contrib.toolset\" as it is impossible to know which the user is\n # referring to.\n if basename in cache:\n self.manager.errors()('duplicate module name \"{0}\" '\n 'found in boost-build path'.format(basename))\n cache[basename] = mname\n self.__python_module_cache = cache",
"def _init_module_cache():\n\t\t\"\"\"\n\t\tModule caching, it helps with not having to import again and again same modules.\n\t\t@return: boolean, True if module caching has been done, False if module caching was already done.\n\t\t\"\"\"\n\n\t\t# While there are not loaded modules, load these ones\n\t\tif len(FieldTranslation._modules) < len(FieldTranslation._model_module_paths):\n\t\t\tfor module_path in FieldTranslation._model_module_paths:\n\t\t\t\tFieldTranslation._modules[module_path] = importlib.import_module(module_path)\n\t\t\treturn True\n\t\treturn False",
"def autodiscover(site=None):\n \"\"\"\n Auto-discover INSTALLED_APPS nexus.py modules and fail silently when\n not present. This forces an import on them to register any api bits they\n may want.\n\n Specifying ``site`` will register all auto discovered modules with the new site.\n \"\"\"\n # Bail out if autodiscover didn't finish loading from a previous call so\n # that we avoid running autodiscover again when the URLconf is loaded by\n # the exception handler to resolve the handler500 view. This prevents an\n # admin.py module with errors from re-registering models and raising a\n # spurious AlreadyRegistered exception (see #8245).\n global LOADING\n if LOADING:\n return\n LOADING = True\n\n if site:\n orig_site = globals()['site']\n globals()['site'] = locals()['site']\n\n import imp\n from django.utils.importlib import import_module\n from django.conf import settings\n\n for app in settings.INSTALLED_APPS:\n # For each app, we need to look for an api.py inside that app's\n # package. We can't use os.path here -- recall that modules may be\n # imported different ways (think zip files) -- so we need to get\n # the app's __path__ and look for admin.py on that path.\n\n # Step 1: find out the app's __path__ Import errors here will (and\n # should) bubble up, but a missing __path__ (which is legal, but weird)\n # fails silently -- apps that do weird things with __path__ might\n # need to roll their own admin registration.\n try:\n app_path = import_module(app).__path__\n except (AttributeError, ImportError):\n continue\n\n # Step 2: use imp.find_module to find the app's admin.py. For some\n # reason imp.find_module raises ImportError if the app can't be found\n # but doesn't actually try to import the module. So skip this app if\n # its admin.py doesn't exist\n try:\n imp.find_module('nexus_modules', app_path)\n except ImportError:\n continue\n\n # Step 3: import the app's admin file. If this has errors we want them\n # to bubble up.\n import_module(\"%s.nexus_modules\" % app)\n # # load builtins\n # from gargoyle.builtins import *\n\n if site:\n globals()['site'] = orig_site\n\n # autodiscover was successful, reset loading flag.\n LOADING = False",
"def restore_sys_modules(scrubbed):\n \"\"\"\n Add any previously scrubbed modules back to the sys.modules cache,\n but only if it's safe to do so.\n \"\"\"\n clash = set(sys.modules) & set(scrubbed)\n if len(clash) != 0:\n # If several, choose one arbitrarily to raise an exception about\n first = list(clash)[0]\n raise ImportError('future module {} clashes with Py2 module'\n .format(first))\n sys.modules.update(scrubbed)",
"def update_module(self, modname, underlined=None):\n \"\"\"Update the cache for global names in `modname` module\n\n `modname` is the name of a module.\n \"\"\"\n try:\n pymodule = self.project.get_module(modname)\n self._add_names(pymodule, modname, underlined)\n except exceptions.ModuleNotFoundError:\n pass",
"def load_module(self, path, changed_time, parser=None):\n \"\"\"Attempts to load the specified module from a serialized, cached\n version. If that fails, the method returns none.\"\"\"\n if settings.use_filesystem_cache == False:\n return None\n \n try:\n pickle_changed_time = self._index[path]\n except KeyError:\n return None\n\n if (changed_time is not None and\n pickle_changed_time < changed_time):\n # the pickle file is outdated\n return None\n\n target_path = self._get_hashed_path(path)\n with open(target_path, 'rb') as f:\n try:\n gc.disable()\n cache_module = pickle.load(f)\n if parser is not None:\n for mod in cache_module:\n mod.unpickle(parser)\n finally:\n gc.enable()\n\n debug.dbg('pickle loaded: %s', path)\n return cache_module",
"def autodiscover():\n '''\n Auto-discover INSTALLED_APPS autofixtures.py and tests.py modules and fail\n silently when not present. This forces an import on them to register any\n autofixture bits they may want.\n '''\n from .compat import importlib\n\n # Bail out if autodiscover didn't finish loading from a previous call so\n # that we avoid running autodiscover again when the URLconf is loaded by\n # the exception handler to resolve the handler500 view. This prevents an\n # autofixtures.py module with errors from re-registering models and raising a\n # spurious AlreadyRegistered exception (see #8245).\n global LOADING\n if LOADING:\n return\n LOADING = True\n app_paths = {}\n\n # For each app, we need to look for an autofixture.py inside that app's\n # package. We can't use os.path here -- recall that modules may be\n # imported different ways (think zip files) -- so we need to get\n # the app's __path__ and look for autofixture.py on that path.\n\n # Step 1: find out the app's __path__ Import errors here will (and\n # should) bubble up, but a missing __path__ (which is legal, but weird)\n # fails silently -- apps that do weird things with __path__ might\n # need to roll their own autofixture registration.\n\n import imp\n try:\n from django.apps import apps\n\n for app_config in apps.get_app_configs():\n app_paths[app_config.name] = [app_config.path]\n\n except ImportError:\n # Django < 1.7\n from django.conf import settings\n\n for app in settings.INSTALLED_APPS:\n mod = importlib.import_module(app)\n try:\n app_paths[app] = mod.__path__\n except AttributeError:\n continue\n\n for app, app_path in app_paths.items():\n # Step 2: use imp.find_module to find the app's autofixtures.py. For some\n # reason imp.find_module raises ImportError if the app can't be found\n # but doesn't actually try to import the module. So skip this app if\n # its autofixtures.py doesn't exist\n try:\n file, _, _ = imp.find_module('autofixtures', app_path)\n except ImportError:\n continue\n else:\n if file:\n file.close()\n\n # Step 3: import the app's autofixtures file. If this has errors we want them\n # to bubble up.\n try:\n importlib.import_module(\"%s.autofixtures\" % app)\n except Exception as e:\n warnings.warn(u'Error while importing %s.autofixtures: %r' %\n (app, e))\n\n for app, app_path in app_paths.items():\n try:\n file, _, _ = imp.find_module('tests', app_path)\n except ImportError:\n continue\n else:\n if file:\n file.close()\n\n try:\n importlib.import_module(\"%s.tests\" % app)\n except Exception as e:\n warnings.warn(u'Error while importing %s.tests: %r' %\n (app, e))\n\n # autodiscover was successful, reset loading flag.\n LOADING = False",
"def pycache_clean(context):\n \"Remove __pycache__ directories\"\n #pylint: disable=unused-argument\n dirs = set()\n for root, dirnames, _ in os.walk(os.curdir):\n if '__pycache__' in dirnames:\n dirs.add(os.path.join(root, '__pycache__'))\n print(\"Removing __pycache__ directories\")\n rmrf(dirs, verbose=False)",
"def find_module(cls, fullname, path=None):\n \"\"\"find the module on sys.path or 'path' based on sys.path_hooks and\n sys.path_importer_cache.\n This method is for python2 only\n \"\"\"\n spec = cls.find_spec(fullname, path)\n if spec is None:\n return None\n elif spec.loader is None and spec.submodule_search_locations:\n # Here we need to create a namespace loader to handle namespaces since python2 doesn't...\n return NamespaceLoader2(spec.name, spec.submodule_search_locations)\n else:\n return spec.loader",
"def watch_module_cache_get(cache, module):\n \"\"\"\n When we ask to fetch a module with optional config file, check time stamps\n and dependencies to determine if it should be reloaded or not.\n\n :param cache: the cache object that stores whether to check for config\n files and which files have been loaded.\n :param module: the path of the module to load.\n :returns: the loaded module.\n \"\"\"\n imp.acquire_lock()\n try:\n if not hasattr(cache, \"timestamps\"):\n cache.timestamps = {}\n mtime = os.path.getmtime(module)\n mtime = latest_submodule_time(module, mtime)\n if getattr(cache, \"config\", False):\n config_file = module[:-2] + \"yaml\"\n if os.path.exists(config_file):\n # Our timestamp is the latest time of the config file or the\n # module.\n mtime = max(mtime, os.path.getmtime(config_file))\n # If we have a config file and the timestamp is more recent than\n # the recorded timestamp, remove the config file from the list of\n # loaded files so that it will get loaded again.\n if config_file in cache.config_files and mtime > cache.timestamps.get(module, 0):\n del cache.config_files[config_file]\n tangelo.log(\"WATCH\", \"Asking to reload config file %s\" % config_file)\n # If the timestamp is more recent than the recorded value, remove the\n # the module from our records so that it will be loaded again.\n if module in cache.modules and mtime > cache.timestamps.get(module, 0):\n del cache.modules[module]\n tangelo.log(\"WATCH\", \"Asking to reload module %s\" % module)\n if module not in cache.timestamps:\n tangelo.log_info(\"WATCH\", \"Monitoring module %s\" % module)\n reload_recent_submodules(module, mtime)\n cache.timestamps[module] = mtime\n service = tangelo_module_cache_get(cache, module)\n # Update our time based on all the modules that we may have just\n # imported. The times can change from before because python files are\n # compiled, for instance.\n mtime = latest_submodule_time(module, mtime)\n cache.timestamps[module] = mtime\n finally:\n imp.release_lock()\n return service"
] |
[
0.7373819351196289,
0.7117829918861389,
0.7047575116157532,
0.7046589255332947,
0.6884981989860535,
0.6834573745727539,
0.6827386617660522,
0.6806471943855286,
0.6788356900215149,
0.6784219741821289,
0.6779079437255859,
0.677069902420044
] |
Get project version (using versioneer)
:return: string containing version
|
def get_version():
"""
Get project version (using versioneer)
:return: string containing version
"""
setup_versioneer()
clean_cache()
import versioneer
version = versioneer.get_version()
parsed_version = parse_version(version)
if '*@' in str(parsed_version):
import time
version += str(int(time.time()))
return version
|
[
"def _get_version():\n \"\"\"Return the project version from VERSION file.\"\"\"\n\n with open(join(dirname(__file__), '{{project.package}}/VERSION'), 'rb') as f:\n version = f.read().decode('ascii').strip()\n return version",
"def _get_version():\n \"\"\"Return the project version from VERSION file.\"\"\"\n with open(os.path.join(os.path.dirname(__file__), PACKAGE_NAME, 'VERSION'), 'rb') as f:\n version = f.read().decode('ascii').strip()\n return version",
"def version(cls): # noqa: N805 # pylint: disable=no-self-argument\n \"\"\"\n :py:class:Returns `str` -- Returns :attr:`_version_` if set,\n otherwise falls back to module `__version__` or None\n \"\"\"\n return cls._version_ or getattr(sys.modules.get(cls.__module__, None),\n '__version__', None)",
"def get_versions(verbose=False):\n \"\"\"Get the project version from whatever source is available.\n\n Returns dict with two keys: 'version' and 'full'.\n \"\"\"\n if \"versioneer\" in sys.modules:\n # see the discussion in cmdclass.py:get_cmdclass()\n del sys.modules[\"versioneer\"]\n\n root = get_root()\n cfg = get_config_from_root(root)\n\n assert cfg.VCS is not None, \"please set [versioneer]VCS= in setup.cfg\"\n handlers = HANDLERS.get(cfg.VCS)\n assert handlers, \"unrecognized VCS '%s'\" % cfg.VCS\n verbose = verbose or cfg.verbose\n assert cfg.versionfile_source is not None, \\\n \"please set versioneer.versionfile_source\"\n assert cfg.tag_prefix is not None, \"please set versioneer.tag_prefix\"\n\n versionfile_abs = os.path.join(root, cfg.versionfile_source)\n\n # extract version from first of: _version.py, VCS command (e.g. 'git\n # describe'), parentdir. This is meant to work for developers using a\n # source checkout, for users of a tarball created by 'setup.py sdist',\n # and for users of a tarball/zipball created by 'git archive' or github's\n # download-from-tag feature or the equivalent in other VCSes.\n\n get_keywords_f = handlers.get(\"get_keywords\")\n from_keywords_f = handlers.get(\"keywords\")\n if get_keywords_f and from_keywords_f:\n try:\n keywords = get_keywords_f(versionfile_abs)\n ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)\n if verbose:\n print(\"got version from expanded keyword %s\" % ver)\n return ver\n except NotThisMethod:\n pass\n\n try:\n ver = versions_from_file(versionfile_abs)\n if verbose:\n print(\"got version from file %s %s\" % (versionfile_abs, ver))\n return ver\n except NotThisMethod:\n pass\n\n from_vcs_f = handlers.get(\"pieces_from_vcs\")\n if from_vcs_f:\n try:\n pieces = from_vcs_f(cfg.tag_prefix, root, verbose)\n ver = render(pieces, cfg.style)\n if verbose:\n print(\"got version from VCS %s\" % ver)\n return ver\n except NotThisMethod:\n pass\n\n try:\n if cfg.parentdir_prefix:\n ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)\n if verbose:\n print(\"got version from parentdir %s\" % ver)\n return ver\n except NotThisMethod:\n pass\n\n if verbose:\n print(\"unable to compute version\")\n\n return {\"version\": \"0+unknown\", \"full-revisionid\": None,\n \"dirty\": None, \"error\": \"unable to compute version\"}",
"def get_version():\n \"\"\"Returns version number, without module import (which can lead to ImportError\n if some dependencies are unavailable before install.\"\"\"\n contents = read_file(os.path.join('admirarchy', '__init__.py'))\n version = re.search('VERSION = \\(([^)]+)\\)', contents)\n version = version.group(1).replace(', ', '.').strip()\n return version",
"def get_version(version):\n \"\"\"\n Returns a PEP 440-compliant version number from VERSION.\n\n Created by modifying django.utils.version.get_version\n \"\"\"\n\n # Now build the two parts of the version number:\n # major = X.Y[.Z]\n # sub = .devN - for development releases\n # | {a|b|rc}N - for alpha, beta and rc releases\n # | .postN - for post-release releases\n\n assert len(version) == 5\n\n version_parts = version[:2] if version[2] == 0 else version[:3]\n\n # Build the first part of the version\n major = '.'.join(str(x) for x in version_parts)\n\n # Just return it if this is a final release version\n if version[3] == 'final':\n return major\n\n # Add the rest\n sub = ''.join(str(x) for x in version[3:5])\n\n if version[3] == 'dev':\n # Override the sub part. Add in a timestamp\n timestamp = get_git_changeset()\n sub = 'dev%s' % (timestamp if timestamp else version[4])\n return '%s.%s' % (major, sub)\n if version[3] == 'post':\n # We need a dot for post\n return '%s.%s' % (major, sub)\n elif version[3] in ('a', 'b', 'rc'):\n # No dot for these\n return '%s%s' % (major, sub)\n else:\n raise ValueError('Invalid version: %s' % str(version))",
"def get_version():\n \"\"\"Get the version number of this package.\n\n Returns:\n str: The version number (marjor.minor.patch).\n\n Note:\n When this package is installed, the version number will be available through the\n package resource details. Otherwise this method will look for a ``.semver`` file.\n\n Note:\n In rare cases corrupt installs can cause the version number to be unknown. In this case\n the version number will be set to the string \"Unknown\".\n\n \"\"\"\n\n if PackageHelper.__version:\n return PackageHelper.__version\n\n PackageHelper.__version = \"Unknown\"\n\n # If this is a GIT clone without install, use the ``.semver`` file.\n file = os.path.realpath(__file__)\n folder = os.path.dirname(file)\n\n try:\n semver = open(folder + \"/../../.semver\", \"r\")\n PackageHelper.__version = semver.read().rstrip()\n semver.close()\n return PackageHelper.__version\n except:\n pass\n\n # If the package was installed, get the version number via Python's distribution details.\n try:\n distribution = pkg_resources.get_distribution(PackageHelper.get_alias())\n if distribution.version:\n PackageHelper.__version = distribution.version\n return PackageHelper.__version\n except:\n pass\n\n return PackageHelper.__version",
"def get_version(path):\n \"\"\"Return the project version from VERSION file.\"\"\"\n\n with open(os.path.join(path, 'VERSION'), 'rb') as f:\n version = f.read().decode('ascii').strip()\n return version.strip()",
"def get_version():\n \"\"\"Get version from git and VERSION file.\n\n In the case where the version is not tagged in git, this function appends\n .post0+commit if the version has been released and .dev0+commit if the\n version has not yet been released.\n\n Derived from: https://github.com/Changaco/version.py\n \"\"\"\n d = os.path.dirname(__file__)\n # get release number from VERSION\n with open(os.path.join(d, 'VERSION')) as f:\n vre = re.compile('.Version: (.+)$', re.M)\n version = vre.search(f.read()).group(1)\n\n if os.path.isdir(os.path.join(d, '.git')):\n # Get the version using \"git describe\".\n cmd = 'git describe --tags'\n try:\n git_version = check_output(cmd.split()).decode().strip()[1:]\n except CalledProcessError:\n print('Unable to get version number from git tags\\n'\n 'Setting to x.x')\n git_version = 'x.x'\n\n # PEP440 compatibility\n if '-' in git_version:\n git_revision = check_output(['git', 'rev-parse', 'HEAD'])\n git_revision = git_revision.strip().decode('ascii')\n # add post0 if the version is released\n # otherwise add dev0 if the version is not yet released\n if ISRELEASED:\n version += '.post0+' + git_revision[:7]\n else:\n version += '.dev0+' + git_revision[:7]\n\n return version",
"def version(self) -> str:\n \"\"\"\n Version version number property. Must be a string consisting of three\n non-negative integers delimited by periods (eg. '1.0.1').\n \"\"\"\n version: str = (\n str(self._major) + '.' +\n str(self._minor) + '.' +\n str(self._patch)\n )\n return version",
"def get_version():\n \"\"\"Extracts the version number from the version.py file.\n \"\"\"\n VERSION_FILE = os.path.join(module_name, 'version.py')\n txt = open(VERSION_FILE).read()\n mo = re.search(r'^__version__ = [\\'\"]([^\\'\"]*)[\\'\"]', txt, re.M)\n if mo:\n version = mo.group(1)\n bs_version = os.environ.get('MODULEVER', '0.0')\n assert bs_version == \"0.0\" or bs_version == version, \\\n \"Version {} specified by the build system doesn't match {} in \" \\\n \"version.py\".format(bs_version, version)\n return version\n else:\n raise RuntimeError('Unable to find version string in {0}.'\n .format(VERSION_FILE))",
"def get_version():\n \"\"\"Extracts the version number from the version.py file.\"\"\"\n VERSION_FILE = '../malcolm/version.py'\n mo = re.search(r'^__version__ = [\\'\"]([^\\'\"]*)[\\'\"]',\n open(VERSION_FILE, 'rt').read(), re.M)\n if mo:\n return mo.group(1)\n else:\n raise RuntimeError(\n 'Unable to find version string in {0}.'.format(VERSION_FILE))"
] |
[
0.7875058650970459,
0.7697684168815613,
0.7648671865463257,
0.759833574295044,
0.7570896744728088,
0.7523835301399231,
0.7461139559745789,
0.7398679256439209,
0.7363691926002502,
0.7361968159675598,
0.7345678210258484,
0.7333527207374573
] |
Find the common prefix of two or more paths.
::
>>> import pathlib
>>> one = pathlib.Path('foo/bar/baz')
>>> two = pathlib.Path('foo/quux/biz')
>>> three = pathlib.Path('foo/quux/wuux')
::
>>> import uqbar.io
>>> str(uqbar.io.find_common_prefix([one, two, three]))
'foo'
:param paths: paths to inspect
|
def find_common_prefix(
paths: Sequence[Union[str, pathlib.Path]]
) -> Optional[pathlib.Path]:
"""
Find the common prefix of two or more paths.
::
>>> import pathlib
>>> one = pathlib.Path('foo/bar/baz')
>>> two = pathlib.Path('foo/quux/biz')
>>> three = pathlib.Path('foo/quux/wuux')
::
>>> import uqbar.io
>>> str(uqbar.io.find_common_prefix([one, two, three]))
'foo'
:param paths: paths to inspect
"""
counter: collections.Counter = collections.Counter()
for path in paths:
path = pathlib.Path(path)
counter.update([path])
counter.update(path.parents)
valid_paths = sorted(
[path for path, count in counter.items() if count >= len(paths)],
key=lambda x: len(x.parts),
)
if valid_paths:
return valid_paths[-1]
return None
|
[
"def _StripCommonPathPrefix(paths):\n \"\"\"Removes path common prefix from a list of path strings.\"\"\"\n # Find the longest common prefix in terms of characters.\n common_prefix = os.path.commonprefix(paths)\n # Truncate at last segment boundary. E.g. '/aa/bb1/x.py' and '/a/bb2/x.py'\n # have '/aa/bb' as the common prefix, but we should strip '/aa/' instead.\n # If there's no '/' found, returns -1+1=0.\n common_prefix_len = common_prefix.rfind('/') + 1\n return [path[common_prefix_len:] for path in paths]",
"def posix_commonpath(paths):\n \"\"\"Given a sequence of POSIX path names,\n return the longest common sub-path.\"\"\"\n\n if not paths:\n raise ValueError('commonpath() arg is an empty sequence')\n\n check_arg_types('commonpath', *paths)\n\n if isinstance(paths[0], bytes):\n sep = b'/'\n curdir = b'.'\n else:\n sep = '/'\n curdir = '.'\n\n split_paths = [path.split(sep) for path in paths]\n\n try:\n isabs, = set(p[:1] == sep for p in paths)\n except ValueError:\n raise ValueError(\"Can't mix absolute and relative paths\")\n\n split_paths = [[c for c in s if c and c != curdir] for s in split_paths]\n s_min = min(split_paths)\n s_max = max(split_paths)\n common = s_min\n for i, run_c in enumerate(s_min):\n if run_c != s_max[i]:\n common = s_min[:i]\n break\n\n prefix = sep if isabs else sep[:0]\n return prefix + sep.join(common)",
"def common_directory(paths):\n \"\"\"Find the deepest common directory of a list of paths.\n\n :return: if no paths are provided, None is returned;\n if there is no common directory, '' is returned;\n otherwise the common directory with a trailing / is returned.\n \"\"\"\n import posixpath\n def get_dir_with_slash(path):\n if path == b'' or path.endswith(b'/'):\n return path\n else:\n dirname, basename = posixpath.split(path)\n if dirname == b'':\n return dirname\n else:\n return dirname + b'/'\n\n if not paths:\n return None\n elif len(paths) == 1:\n return get_dir_with_slash(paths[0])\n else:\n common = common_path(paths[0], paths[1])\n for path in paths[2:]:\n common = common_path(common, path)\n return get_dir_with_slash(common)",
"def _find_common_roots(paths):\n \"\"\"Out of some paths it finds the common roots that need monitoring.\"\"\"\n paths = [x.split(os.path.sep) for x in paths]\n root = {}\n for chunks in sorted(paths, key=len, reverse=True):\n node = root\n for chunk in chunks:\n node = node.setdefault(chunk, {})\n node.clear()\n\n rv = set()\n\n def _walk(node, path):\n for prefix, child in iteritems(node):\n _walk(child, path + (prefix,))\n if not node:\n rv.add(\"/\".join(path))\n\n _walk(root, ())\n return rv",
"def nt_commonpath(paths): # pylint: disable=too-many-locals\n \"\"\"Given a sequence of NT path names,\n return the longest common sub-path.\"\"\"\n\n from ntpath import splitdrive\n\n if not paths:\n raise ValueError('commonpath() arg is an empty sequence')\n\n check_arg_types('commonpath', *paths)\n\n if isinstance(paths[0], bytes):\n sep = b'\\\\'\n altsep = b'/'\n curdir = b'.'\n else:\n sep = '\\\\'\n altsep = '/'\n curdir = '.'\n\n drivesplits = [splitdrive(p.replace(altsep, sep).lower()) for p in paths]\n split_paths = [p.split(sep) for d, p in drivesplits]\n\n try:\n isabs, = set(p[:1] == sep for d, p in drivesplits)\n except ValueError:\n raise ValueError(\"Can't mix absolute and relative paths\")\n\n # Check that all drive letters or UNC paths match. The check is made\n # only now otherwise type errors for mixing strings and bytes would not\n # be caught.\n if len(set(d for d, p in drivesplits)) != 1:\n raise ValueError(\"Paths don't have the same drive\")\n\n drive, path = splitdrive(paths[0].replace(altsep, sep))\n common = path.split(sep)\n common = [c for c in common if c and c != curdir]\n\n split_paths = [[c for c in s if c and c != curdir] for s in split_paths]\n s_min = min(split_paths)\n s_max = max(split_paths)\n for i, run_c in enumerate(s_min):\n if run_c != s_max[i]:\n common = common[:i]\n break\n else:\n common = common[:len(s_min)]\n\n prefix = drive + sep if isabs else drive\n return prefix + sep.join(common)",
"def _common_prefix(names):\n \"\"\"Get the common prefix for all names\"\"\"\n if not names:\n return ''\n prefix = names[0]\n for name in names:\n i = 0\n while i < len(prefix) and i < len(name) and prefix[i] == name[i]:\n i += 1\n prefix = prefix[:i]\n return prefix",
"def commonpath(paths):\n \"\"\"py2 compatible version of py3's os.path.commonpath\n\n >>> commonpath([\"\"])\n ''\n >>> commonpath([\"/\"])\n '/'\n >>> commonpath([\"/a\"])\n '/a'\n >>> commonpath([\"/a//\"])\n '/a'\n >>> commonpath([\"/a\", \"/a\"])\n '/a'\n >>> commonpath([\"/a/b\", \"/a\"])\n '/a'\n >>> commonpath([\"/a/b\", \"/a/b\"])\n '/a/b'\n >>> commonpath([\"/a/b/c\", \"/a/b/d\"])\n '/a/b'\n >>> commonpath([\"/a/b/c\", \"/a/b/d\", \"//a//b//e//\"])\n '/a/b'\n >>> commonpath([])\n Traceback (most recent call last):\n ...\n ValueError: commonpath() arg is an empty sequence\n >>> commonpath([\"/absolute/path\", \"relative/path\"])\n Traceback (most recent call last):\n ... \n ValueError: (Can't mix absolute and relative paths\")\n \"\"\"\n assert os.sep == \"/\", \"tested only on slash-delimited paths\"\n split_re = re.compile(os.sep + \"+\")\n\n if len(paths) == 0:\n raise ValueError(\"commonpath() arg is an empty sequence\")\n\n spaths = [p.rstrip(os.sep) for p in paths]\n splitpaths = [split_re.split(p) for p in spaths]\n if all(p.startswith(os.sep) for p in paths):\n abs_paths = True\n splitpaths = [p[1:] for p in splitpaths]\n elif all(not p.startswith(os.sep) for p in paths):\n abs_paths = False\n else:\n raise ValueError(\"Can't mix absolute and relative paths\")\n\n splitpaths0 = splitpaths[0]\n splitpaths1n = splitpaths[1:]\n min_length = min(len(p) for p in splitpaths)\n equal = [i for i in range(min_length) if all(splitpaths0[i] == sp[i] for sp in splitpaths1n)]\n max_equal = max(equal or [-1])\n commonelems = splitpaths0[:max_equal + 1]\n commonpath = os.sep.join(commonelems)\n return (os.sep if abs_paths else '') + commonpath",
"def _find_common_prefix(self, node1, node2):\n \"\"\"Find common prefix between two nodes.\"\"\"\n tokens1 = [item.strip() for item in node1.split(self.node_separator)]\n tokens2 = [item.strip() for item in node2.split(self.node_separator)]\n ret = []\n for token1, token2 in zip(tokens1, tokens2):\n if token1 == token2:\n ret.append(token1)\n else:\n break\n return self.node_separator.join(ret)",
"def shared_prefix(args):\n \"\"\"\n Find the shared prefix between the strings.\n\n For instance:\n\n sharedPrefix(['blahblah', 'blahwhat'])\n\n returns 'blah'.\n \"\"\"\n i = 0\n while i < min(map(len, args)):\n if len(set(map(operator.itemgetter(i), args))) != 1:\n break\n i += 1\n return args[0][:i]",
"def get_common_path(pathlist):\r\n \"\"\"Return common path for all paths in pathlist\"\"\"\r\n common = osp.normpath(osp.commonprefix(pathlist))\r\n if len(common) > 1:\r\n if not osp.isdir(common):\r\n return abspardir(common)\r\n else:\r\n for path in pathlist:\r\n if not osp.isdir(osp.join(common, path[len(common)+1:])):\r\n # `common` is not the real common prefix\r\n return abspardir(common)\r\n else:\r\n return osp.abspath(common)",
"def _commonprefix(files):\n \"\"\"Retrieve a common prefix for files without extra _R1 _I1 extensions.\n\n Allows alternative naming schemes (R1/R2/R3) (R1/R2/I1).\n \"\"\"\n out = os.path.commonprefix(files)\n out = out.rstrip(\"_R\")\n out = out.rstrip(\"_I\")\n out = out.rstrip(\"_\")\n return out",
"function stripCommonPath(paths) {\n var prefix = commonPrefix(paths);\n var prefixLength = prefix.lastIndexOf('/') + 1;\n\n return paths.map(function(str) {\n return str.slice(prefixLength);\n });\n}"
] |
[
0.7656171917915344,
0.7499231696128845,
0.7466578483581543,
0.7462409138679504,
0.729282021522522,
0.7263355851173401,
0.7221022844314575,
0.7215659022331238,
0.7173000574111938,
0.7153367400169373,
0.7125357985496521,
0.7106934785842896
] |
r"""Finds executable `name`.
Similar to Unix ``which`` command.
Returns list of zero or more full paths to `name`.
|
def find_executable(name: str, flags=os.X_OK) -> List[str]:
r"""Finds executable `name`.
Similar to Unix ``which`` command.
Returns list of zero or more full paths to `name`.
"""
result = []
extensions = [x for x in os.environ.get("PATHEXT", "").split(os.pathsep) if x]
path = os.environ.get("PATH", None)
if path is None:
return []
for path in os.environ.get("PATH", "").split(os.pathsep):
path = os.path.join(path, name)
if os.access(path, flags):
result.append(path)
for extension in extensions:
path_extension = path + extension
if os.access(path_extension, flags):
result.append(path_extension)
return result
|
[
"def which(cmd):\n \"\"\"\n Returns full path to a executable.\n\n Args:\n cmd (str): Executable command to search for.\n\n Returns:\n (str) Full path to command. None if it is not found.\n\n Example::\n\n full_path_to_python = which(\"python\")\n \"\"\"\n def is_exe(fp):\n return os.path.isfile(fp) and os.access(fp, os.X_OK)\n\n fpath, fname = os.path.split(cmd)\n if fpath:\n if is_exe(cmd):\n return cmd\n else:\n for path in os.environ[\"PATH\"].split(os.pathsep):\n exe_file = os.path.join(path, cmd)\n if is_exe(exe_file):\n return exe_file\n return None",
"def which(name, flags=os.X_OK):\n \"\"\"Search PATH for executable files with the given name.\n\n ..note:: This function was taken verbatim from the twisted framework,\n licence available here:\n http://twistedmatrix.com/trac/browser/tags/releases/twisted-8.2.0/LICENSE\n\n On newer versions of MS-Windows, the PATHEXT environment variable will be\n set to the list of file extensions for files considered executable. This\n will normally include things like \".EXE\". This function will also find\n files\n with the given name ending with any of these extensions.\n\n On MS-Windows the only flag that has any meaning is os.F_OK. Any other\n flags will be ignored.\n\n :param name: The name for which to search.\n :type name: C{str}\n\n :param flags: Arguments to L{os.access}.\n :type flags: C{int}\n\n :returns: A list of the full paths to files found, in the order in which\n they were found.\n :rtype: C{list}\n \"\"\"\n result = []\n # pylint: disable=W0141\n extensions = [\n _f for _f in os.environ.get(\n 'PATHEXT',\n '').split(\n os.pathsep) if _f]\n # pylint: enable=W0141\n path = os.environ.get('PATH', None)\n # In c6c9b26 we removed this hard coding for issue #529 but I am\n # adding it back here in case the user's path does not include the\n # gdal binary dir on OSX but it is actually there. (TS)\n if sys.platform == 'darwin': # Mac OS X\n gdal_prefix = (\n '/Library/Frameworks/GDAL.framework/'\n 'Versions/Current/Programs/')\n path = '%s:%s' % (path, gdal_prefix)\n\n LOGGER.debug('Search path: %s' % path)\n\n if path is None:\n return []\n\n for p in path.split(os.pathsep):\n p = os.path.join(p, name)\n if os.access(p, flags):\n result.append(p)\n for e in extensions:\n path_extensions = p + e\n if os.access(path_extensions, flags):\n result.append(path_extensions)\n\n return result",
"def find_exe(name):\n \"\"\"\n Find an executable with the given name.\n :param name:\n :return:\n \"\"\"\n for path in os.getenv('PATH').split(os.pathsep):\n for ext in ('', '.exe', '.cmd', '.bat', '.sh'):\n full_path = os.path.join(path, name + ext)\n if os.path.isfile(full_path) and os.access(full_path, os.X_OK):\n return full_path\n return None",
"def _which(executable, flags=os.X_OK, abspath_only=False, disallow_symlinks=False):\n \"\"\"Borrowed from Twisted's :mod:twisted.python.proutils .\n\n Search PATH for executable files with the given name.\n\n On newer versions of MS-Windows, the PATHEXT environment variable will be\n set to the list of file extensions for files considered executable. This\n will normally include things like \".EXE\". This fuction will also find files\n with the given name ending with any of these extensions.\n\n On MS-Windows the only flag that has any meaning is os.F_OK. Any other\n flags will be ignored.\n\n Note: This function does not help us prevent an attacker who can already\n manipulate the environment's PATH settings from placing malicious code\n higher in the PATH. It also does happily follows links.\n\n :param str name: The name for which to search.\n :param int flags: Arguments to L{os.access}.\n :rtype: list\n :returns: A list of the full paths to files found, in the order in which\n they were found.\n \"\"\"\n def _can_allow(p):\n if not os.access(p, flags):\n return False\n if abspath_only and not os.path.abspath(p):\n log.warn('Ignoring %r (path is not absolute)', p)\n return False\n if disallow_symlinks and os.path.islink(p):\n log.warn('Ignoring %r (path is a symlink)', p)\n return False\n return True\n\n result = []\n exts = filter(None, os.environ.get('PATHEXT', '').split(os.pathsep))\n path = os.environ.get('PATH', None)\n if path is None:\n return []\n for p in os.environ.get('PATH', '').split(os.pathsep):\n p = os.path.join(p, executable)\n if _can_allow(p):\n result.append(p)\n for e in exts:\n pext = p + e\n if _can_allow(pext):\n result.append(pext)\n return result",
"def fallback_which(command, location=None, allow_global=False, system=False):\n \"\"\"\n A fallback implementation of the `which` utility command that relies exclusively on\n searching the path for commands.\n\n :param str command: The command to search for, optional\n :param str location: The search location to prioritize (prepend to path), defaults to None\n :param bool allow_global: Whether to search the global path, defaults to False\n :param bool system: Whether to use the system python instead of pipenv's python, defaults to False\n :raises ValueError: Raised if no command is provided\n :raises TypeError: Raised if the command provided is not a string\n :return: A path to the discovered command location\n :rtype: str\n \"\"\"\n\n from .vendor.pythonfinder import Finder\n if not command:\n raise ValueError(\"fallback_which: Must provide a command to search for...\")\n if not isinstance(command, six.string_types):\n raise TypeError(\"Provided command must be a string, received {0!r}\".format(command))\n global_search = system or allow_global\n if location is None:\n global_search = True\n finder = Finder(system=False, global_search=global_search, path=location)\n if is_python_command(command):\n result = find_python(finder, command)\n if result:\n return result\n result = finder.which(command)\n if result:\n return result.path.as_posix()\n return \"\"",
"def choose(name, choices, default=None, resolve=None, no_choice=('none',)):\n \"\"\"\n Grabs user input from command line from set of provided choices.\n\n :param name: prompt text\n :param choices: list or tuple of available choices. Choices may be\n single strings or (key, value) tuples.\n :param default: default value if no input provided.\n :param no_choice: acceptable list of strings for \"null choice\"\n \"\"\"\n\n if not resolve:\n resolve = lambda o: o.lower()\n\n _choices = []\n options = []\n\n for choice in choices:\n if isinstance(choice, string_type):\n options.append(choice)\n else:\n options.append(\"%s [%s]\" % (choice[1], choice[0]))\n choice = choice[0]\n _choices.append(choice)\n\n while True:\n rv = prompt(name + '? - (%s)' % ', '.join(options), default)\n if not rv:\n return default\n rv = resolve(rv)\n if rv in no_choice:\n return None\n if rv in _choices:\n return rv",
"def find(self, name, namespace=None):\n \"\"\"\n Find plugin object\n\n Parameters\n ----------\n name : string\n A name of the object entry or full namespace\n namespace : string, optional\n A period separated namespace. E.g. `foo.bar.hogehoge`\n\n Returns\n -------\n instance\n An instance found\n\n Raises\n ------\n KeyError\n If the named instance have not registered\n\n Examples\n --------\n >>> registry = Registry()\n >>> registry.register('hello', 'goodbye')\n >>> registry.register('foo', 'bar', 'hoge.hoge.hoge')\n >>> registry.register('foobar', 'foobar', 'hoge.hoge')\n >>> registry.find('hello') == 'goodbye'\n True\n >>> registry.find('foo', 'hoge.hoge.hoge') == 'bar'\n True\n >>> registry.find('hoge.hoge.foobar') == 'foobar'\n True\n\n \"\"\"\n if \".\" in name:\n namespace, name = name.rsplit(\".\", 1)\n caret = self.raw\n if namespace:\n for term in namespace.split('.'):\n if term not in caret:\n caret[term] = Bunch()\n caret = caret[term]\n return caret[name]",
"def search_tree(self, name): # noqa: D302\n r\"\"\"\n Search tree for all nodes with a specific name.\n\n :param name: Node name to search for\n :type name: :ref:`NodeName`\n\n :raises: RuntimeError (Argument \\`name\\` is not valid)\n\n For example:\n\n >>> from __future__ import print_function\n >>> import pprint, ptrie\n >>> tobj = ptrie.Trie('/')\n >>> tobj.add_nodes([\n ... {'name':'root', 'data':[]},\n ... {'name':'root/anode', 'data':7},\n ... {'name':'root/bnode', 'data':[]},\n ... {'name':'root/cnode', 'data':[]},\n ... {'name':'root/bnode/anode', 'data':['a', 'b', 'c']},\n ... {'name':'root/cnode/anode/leaf', 'data':True}\n ... ])\n >>> print(tobj)\n root\n ├anode (*)\n ├bnode\n │└anode (*)\n └cnode\n └anode\n └leaf (*)\n >>> pprint.pprint(tobj.search_tree('anode'), width=40)\n ['root/anode',\n 'root/bnode/anode',\n 'root/cnode/anode',\n 'root/cnode/anode/leaf']\n \"\"\"\n if self._validate_node_name(name):\n raise RuntimeError(\"Argument `name` is not valid\")\n return self._search_tree(name)",
"def which(program):\n \"\"\"\n A python implementation of which.\n\n https://stackoverflow.com/a/2969007\n \"\"\"\n path_ext = [\"\"]\n ext_list = None\n\n if sys.platform == \"win32\":\n ext_list = [ext.lower() for ext in os.environ[\"PATHEXT\"].split(\";\")]\n\n def is_exe(fpath):\n exe = os.path.isfile(fpath) and os.access(fpath, os.X_OK)\n # search for executable under windows\n if not exe:\n if ext_list:\n for ext in ext_list:\n exe_path = f\"{fpath}{ext}\"\n if os.path.isfile(exe_path) and os.access(exe_path, os.X_OK):\n path_ext[0] = ext\n return True\n return False\n return exe\n\n fpath, fname = os.path.split(program)\n\n if fpath:\n if is_exe(program):\n return f\"{program}{path_ext[0]}\"\n else:\n for path in os.environ[\"PATH\"].split(os.pathsep):\n path = path.strip('\"')\n exe_file = os.path.join(path, program)\n if is_exe(exe_file):\n return f\"{exe_file}{path_ext[0]}\"\n return None",
"def names(args):\n \"\"\"\n %prog names namelist templatefile\n\n Generate name blocks from the `namelist` file. The `namelist` file is\n tab-delimited that contains >=4 columns of data. Three columns are mandatory.\n First name, middle initial and last name. First row is table header. For the\n extra columns, the first column will go in the `$N0` field in the template\n file, second to the `$N1` field, etc.\n\n In the alternative mode, the namelist just contains several sections. First\n row will go in the `$N0` in the template file, second to the `$N1` field.\n\n The namelist may look like:\n [Sequence]\n Bruce A. Roe, Frederic Debelle, Giles Oldroyd, Rene Geurts\n [Manuscript]\n Haibao Tang1, Vivek Krishnakumar1, Shelby Bidwell1, Benjamin Rosen1\n\n Then in this example Sequence section goes into N0, Manuscript goes into N1.\n\n Useful hints for constructing the template file can be found in:\n <http://www.ncbi.nlm.nih.gov/IEB/ToolBox/CPP_DOC/asn_spec/seq.asn.html>\n\n Often the template file can be retrieved from web form:\n <http://www.ncbi.nlm.nih.gov/WebSub/template.cgi>\n \"\"\"\n p = OptionParser(names.__doc__)\n opts, args = p.parse_args(args)\n\n if len(args) != 2:\n sys.exit(p.print_help())\n\n namelist, templatefile = args\n\n # First check the alternative format\n if open(namelist).read()[0] == '[':\n out = parse_names(namelist)\n make_template(templatefile, out)\n return\n\n reader = csv.reader(open(namelist), delimiter=\"\\t\")\n header = next(reader)\n ncols = len(header)\n assert ncols > 3\n nextras = ncols - 3\n\n blocks = []\n bools = []\n for row in reader:\n first, middle, last = row[:3]\n extras = row[3:]\n bools.append([(x.upper() == 'Y') for x in extras])\n middle = middle.strip()\n if middle != \"\":\n middle = middle.rstrip('.') + '.'\n initials = \"{0}.{1}\".format(first[0], middle)\n suffix = \"\"\n nameblock = NameTemplate.format(last=last, first=first,\n initials=initials, suffix=suffix)\n blocks.append(nameblock)\n\n selected_idx = zip(*bools)\n out = [] * nextras\n for i, sbools in enumerate(selected_idx):\n selected = []\n for b, ss in zip(blocks, sbools):\n if ss:\n selected.append(b)\n bigblock = \",\\n\".join(selected)\n out.append(bigblock)\n logging.debug(\"List N{0} contains a total of {1} names.\".format(i,\n len(selected)))\n\n make_template(templatefile, out)",
"def which(program, paths=None):\n \"\"\" takes a program name or full path, plus an optional collection of search\n paths, and returns the full path of the requested executable. if paths is\n specified, it is the entire list of search paths, and the PATH env is not\n used at all. otherwise, PATH env is used to look for the program \"\"\"\n\n def is_exe(fpath):\n return (os.path.exists(fpath) and\n os.access(fpath, os.X_OK) and\n os.path.isfile(os.path.realpath(fpath)))\n\n found_path = None\n fpath, fname = os.path.split(program)\n\n # if there's a path component, then we've specified a path to the program,\n # and we should just test if that program is executable. if it is, return\n if fpath:\n program = os.path.abspath(os.path.expanduser(program))\n if is_exe(program):\n found_path = program\n\n # otherwise, we've just passed in the program name, and we need to search\n # the paths to find where it actually lives\n else:\n paths_to_search = []\n\n if isinstance(paths, (tuple, list)):\n paths_to_search.extend(paths)\n else:\n env_paths = os.environ.get(\"PATH\", \"\").split(os.pathsep)\n paths_to_search.extend(env_paths)\n\n for path in paths_to_search:\n exe_file = os.path.join(path, program)\n if is_exe(exe_file):\n found_path = exe_file\n break\n\n return found_path",
"def Q(name):\n \"\"\"\n Quote a variable name\n\n A way to 'quote' variable names, especially ones that do not otherwise\n meet Python's variable name rules.\n\n Parameters\n ----------\n name : str\n Name of variable\n\n Returns\n -------\n value : object\n Value of variable\n\n Examples\n --------\n >>> import pandas as pd\n >>> from plydata import define\n >>> df = pd.DataFrame({'class': [10, 20, 30]})\n\n Since ``class`` is a reserved python keyword it cannot be a variable\n name, and therefore cannot be used in an expression without quoting it.\n\n >>> df >> define(y='class+1')\n Traceback (most recent call last):\n File \"<string>\", line 1\n class+1\n ^\n SyntaxError: invalid syntax\n\n >>> df >> define(y='Q(\"class\")+1')\n class y\n 0 10 11\n 1 20 21\n 2 30 31\n\n Note that it is ``'Q(\"some name\")'`` and not ``'Q(some name)'``.\n As in the above example, you do not need to ``import`` ``Q`` before\n you can use it.\n \"\"\"\n env = EvalEnvironment.capture(1)\n try:\n return env.namespace[name]\n except KeyError:\n raise NameError(\"No data named {!r} found\".format(name))"
] |
[
0.8366226553916931,
0.8197463750839233,
0.8171188235282898,
0.7880702018737793,
0.7684198021888733,
0.767952561378479,
0.7670852541923523,
0.7655863761901855,
0.7653024792671204,
0.7651069760322571,
0.7628904581069946,
0.7624965906143188
] |
Generates relative path from ``source_path`` to ``target_path``.
Handles the case of paths without a common prefix.
::
>>> import pathlib
>>> source = pathlib.Path('foo/bar/baz')
>>> target = pathlib.Path('foo/quux/biz')
::
>>> target.relative_to(source)
Traceback (most recent call last):
...
ValueError: 'foo/quux/biz' does not start with 'foo/bar/baz'
::
>>> import uqbar.io
>>> str(uqbar.io.relative_to(source, target))
'../../quux/biz'
:param source_path: the source path
:param target_path: the target path
|
def relative_to(
source_path: Union[str, pathlib.Path], target_path: Union[str, pathlib.Path]
) -> pathlib.Path:
"""
Generates relative path from ``source_path`` to ``target_path``.
Handles the case of paths without a common prefix.
::
>>> import pathlib
>>> source = pathlib.Path('foo/bar/baz')
>>> target = pathlib.Path('foo/quux/biz')
::
>>> target.relative_to(source)
Traceback (most recent call last):
...
ValueError: 'foo/quux/biz' does not start with 'foo/bar/baz'
::
>>> import uqbar.io
>>> str(uqbar.io.relative_to(source, target))
'../../quux/biz'
:param source_path: the source path
:param target_path: the target path
"""
source_path = pathlib.Path(source_path).absolute()
if source_path.is_file():
source_path = source_path.parent
target_path = pathlib.Path(target_path).absolute()
common_prefix = find_common_prefix([source_path, target_path])
if not common_prefix:
raise ValueError("No common prefix")
source_path = source_path.relative_to(common_prefix)
target_path = target_path.relative_to(common_prefix)
result = pathlib.Path(*[".."] * len(source_path.parts))
return result / target_path
|
[
"def rellink_to (self, target, force=False):\n \"\"\"Make this path a symlink pointing to the given *target*, generating the\n\tproper relative path using :meth:`make_relative`. This gives different\n\tbehavior than :meth:`symlink_to`. For instance, ``Path\n\t('a/b').symlink_to ('c')`` results in ``a/b`` pointing to the path\n\t``c``, whereas :meth:`rellink_to` results in it pointing to ``../c``.\n\tThis can result in broken relative paths if (continuing the example)\n\t``a`` is a symbolic link to a directory.\n\n\tIf either *target* or *self* is absolute, the symlink will point at\n\tthe absolute path to *target*. The intention is that if you’re trying\n\tto link ``/foo/bar`` to ``bee/boo``, it probably makes more sense for\n\tthe link to point to ``/path/to/.../bee/boo`` rather than\n\t``../../../../bee/boo``.\n\n\tIf *force* is true, :meth:`try_unlink` will be called on *self* before\n\tthe link is made, forcing its re-creation.\n\n \"\"\"\n target = self.__class__ (target)\n\n if force:\n self.try_unlink ()\n\n if self.is_absolute ():\n target = target.absolute () # force absolute link\n\n return self.symlink_to (target.make_relative (self.parent))",
"def relative_path(reference_path, input_path):\n \"\"\"Get the relative path to input_path from reference_path.\n\n :param reference_path: The reference path.\n :type reference_path: str\n\n :param input_path: The input path.\n :type input_path: str\n \"\"\"\n start_path = os.path.dirname(reference_path)\n try:\n relative_path = os.path.relpath(input_path, start_path)\n except ValueError:\n # LOGGER.info(e.message)\n relative_path = input_path\n return relative_path",
"def relative_path_from(path, from)\n pathname(path.to_s).relative_path_from(pathname(from.to_s))\n rescue NoMethodError => e\n raise e unless e.name == :zero?\n\n # Work around https://github.com/ruby/ruby/pull/713.\n path = path.to_s\n from = from.to_s\n raise ArgumentError(\"Incompatible path encodings: #{path.inspect} is #{path.encoding}, \" +\n \"#{from.inspect} is #{from.encoding}\")\n end",
"def relative_path(path, from_file):\n \"\"\"\n Return the relative path of a file or directory, specified\n as ``path`` relative to (the parent directory of) ``from_file``.\n\n This method is intented to be called with ``__file__``\n as second argument.\n\n The returned path is relative to the current working directory.\n\n If ``path`` is ``None``, return ``None``.\n\n Example: ::\n\n path=\"res/foo.bar\"\n from_file=\"/root/abc/def/ghi.py\"\n cwd=\"/root\"\n => \"abc/def/res/foo.bar\"\n\n :param string path: the file path\n :param string from_file: the reference file\n :rtype: string\n \"\"\"\n if path is None:\n return None\n abs_path_target = absolute_path(path, from_file)\n abs_path_cwd = os.getcwd()\n if is_windows():\n # NOTE on Windows, if the two paths are on different drives,\n # the notion of relative path is not defined:\n # return the absolute path of the target instead.\n t_drive, t_tail = os.path.splitdrive(abs_path_target)\n c_drive, c_tail = os.path.splitdrive(abs_path_cwd)\n if t_drive != c_drive:\n return abs_path_target\n return os.path.relpath(abs_path_target, start=abs_path_cwd)",
"def relative_to(base, relativee):\n \"\"\"\n Gets 'relativee' relative to 'basepath'.\n\n i.e.,\n\n >>> relative_to('/home/', '/home/radix/')\n 'radix'\n >>> relative_to('.', '/home/radix/Projects/Twisted') # curdir is /home/radix\n 'Projects/Twisted'\n\n The 'relativee' must be a child of 'basepath'.\n \"\"\"\n basepath = os.path.abspath(base)\n relativee = os.path.abspath(relativee)\n if relativee.startswith(basepath):\n relative = relativee[len(basepath):]\n if relative.startswith(os.sep):\n relative = relative[1:]\n return os.path.join(base, relative)\n raise ValueError(\"%s is not a subpath of %s\" % (relativee, basepath))",
"def normrelpath(base, target):\n \"\"\"\n This function takes the base and target arguments as paths, and\n returns an equivalent relative path from base to the target, if both\n provided paths are absolute.\n \"\"\"\n\n if not all(map(isabs, [base, target])):\n return target\n\n return relpath(normpath(target), dirname(normpath(base)))",
"def _source_roots_for_target(target):\n \"\"\"\n :type target:pants.build_graph.target.Target\n \"\"\"\n def root_package_prefix(source_file):\n source = os.path.dirname(source_file)\n return os.path.join(get_buildroot(), target.target_base, source), source.replace(os.sep, '.')\n return {root_package_prefix(source) for source in target.sources_relative_to_source_root()}",
"def get_converted_relative_path(path, relative_to=None):\n \"\"\"Convert `path` to be relative.\n\n Given a vague relative path, return the path relative to the given\n location.\n\n :param str path: The location of a target path\n :param str relative_to: The starting path to build against, optional\n :returns: A relative posix-style path with a leading `./`\n\n This performs additional conversion to ensure the result is of POSIX form,\n and starts with `./`, or is precisely `.`.\n\n >>> os.chdir('/home/user/code/myrepo/myfolder')\n >>> vistir.path.get_converted_relative_path('/home/user/code/file.zip')\n './../../file.zip'\n >>> vistir.path.get_converted_relative_path('/home/user/code/myrepo/myfolder/mysubfolder')\n './mysubfolder'\n >>> vistir.path.get_converted_relative_path('/home/user/code/myrepo/myfolder')\n '.'\n \"\"\"\n from .misc import to_text, to_bytes # noqa\n\n if not relative_to:\n relative_to = os.getcwdu() if six.PY2 else os.getcwd()\n if six.PY2:\n path = to_bytes(path, encoding=\"utf-8\")\n else:\n path = to_text(path, encoding=\"utf-8\")\n relative_to = to_text(relative_to, encoding=\"utf-8\")\n start_path = Path(relative_to)\n try:\n start = start_path.resolve()\n except OSError:\n start = start_path.absolute()\n\n # check if there is a drive letter or mount point\n # if it is a mountpoint use the original absolute path\n # instead of the unc path\n if check_for_unc_path(start):\n start = start_path.absolute()\n\n path = start.joinpath(path).relative_to(start)\n\n # check and see if the path that was passed into the function is a UNC path\n # and raise value error if it is not.\n if check_for_unc_path(path):\n raise ValueError(\"The path argument does not currently accept UNC paths\")\n\n relpath_s = to_text(posixpath.normpath(path.as_posix()))\n if not (relpath_s == \".\" or relpath_s.startswith(\"./\")):\n relpath_s = posixpath.join(\".\", relpath_s)\n return relpath_s",
"def relative_symlink(source_path, link_path):\n \"\"\"Create a symlink at link_path pointing to relative source\n\n :param source_path: Absolute path to source file\n :param link_path: Absolute path to intended symlink\n :raises ValueError if source_path or link_path are not unique, absolute paths\n :raises OSError on failure UNLESS file already exists or no such file/directory\n \"\"\"\n if not os.path.isabs(source_path):\n raise ValueError(\"Path for source:{} must be absolute\".format(source_path))\n if not os.path.isabs(link_path):\n raise ValueError(\"Path for link:{} must be absolute\".format(link_path))\n if source_path == link_path:\n raise ValueError(\"Path for link is identical to source:{}\".format(source_path))\n # The failure state below had a long life as an uncaught error. No behavior was changed here, it just adds a catch.\n # Raising an exception does differ from absolute_symlink, which takes the liberty of deleting existing directories.\n if os.path.isdir(link_path) and not os.path.islink(link_path):\n raise ValueError(\"Path for link would overwrite an existing directory: {}\".format(link_path))\n try:\n if os.path.lexists(link_path):\n os.unlink(link_path)\n rel_path = os.path.relpath(source_path, os.path.dirname(link_path))\n safe_mkdir_for(link_path)\n os.symlink(rel_path, link_path)\n except OSError as e:\n # Another run may beat us to deletion or creation.\n if not (e.errno == errno.EEXIST or e.errno == errno.ENOENT):\n raise",
"def relative_to(self, *other):\n \"\"\"Return the relative path to another path identified by the passed\n arguments. If the operation is not possible (because this is not\n a subpath of the other path), raise ValueError.\n \"\"\"\n # For the purpose of this method, drive and root are considered\n # separate parts, i.e.:\n # Path('c:/').relative_to('c:') gives Path('/')\n # Path('c:/').relative_to('/') raise ValueError\n if not other:\n raise TypeError(\"need at least one argument\")\n parts = self._parts\n drv = self._drv\n root = self._root\n if root:\n abs_parts = [drv, root] + parts[1:]\n else:\n abs_parts = parts\n to_drv, to_root, to_parts = self._parse_args(other)\n if to_root:\n to_abs_parts = [to_drv, to_root] + to_parts[1:]\n else:\n to_abs_parts = to_parts\n n = len(to_abs_parts)\n cf = self._flavour.casefold_parts\n if (root or drv) if n == 0 else cf(abs_parts[:n]) != cf(to_abs_parts):\n formatted = self._format_parsed_parts(to_drv, to_root, to_parts)\n raise ValueError(\"{!r} does not start with {!r}\"\n .format(str(self), str(formatted)))\n return self._from_parsed_parts('', root if n == 1 else '',\n abs_parts[n:])",
"def relative_symlink(target, link_name):\n \"\"\"Make a symlink to target using the shortest possible relative path.\"\"\"\n link_name = os.path.abspath(link_name)\n abs_target = os.path.abspath(target)\n rel_target = os.path.relpath(target, os.path.dirname(link_name))\n\n if os.path.exists(link_name):\n os.remove(link_name)\n os.symlink(rel_target, link_name)",
"def _resolve_relative_to(path, original_root, new_root):\n \"\"\"\n If the given ``path`` is a relative path, then assume it is relative to ``original_root``. This method will\n update the path to be resolve it relative to ``new_root`` and return.\n\n Examples\n -------\n # Assume a file called template.txt at location /tmp/original/root/template.txt expressed as relative path\n # We are trying to update it to be relative to /tmp/new/root instead of the /tmp/original/root\n >>> result = _resolve_relative_to(\"template.txt\", \\\n \"/tmp/original/root\", \\\n \"/tmp/new/root\")\n >>> result\n ../../original/root/template.txt\n\n Returns\n -------\n Updated path if the given path is a relative path. None, if the path is not a relative path.\n \"\"\"\n\n if not isinstance(path, six.string_types) \\\n or path.startswith(\"s3://\") \\\n or os.path.isabs(path):\n # Value is definitely NOT a relative path. It is either a S3 URi or Absolute path or not a string at all\n return None\n\n # Value is definitely a relative path. Change it relative to the destination directory\n return os.path.relpath(\n os.path.normpath(os.path.join(original_root, path)), # Absolute original path w.r.t ``original_root``\n new_root)"
] |
[
0.7140219211578369,
0.7119200825691223,
0.6999659538269043,
0.6958523392677307,
0.6902969479560852,
0.6895066499710083,
0.6857941150665283,
0.6817877292633057,
0.681742787361145,
0.6782861351966858,
0.6780699491500854,
0.6768007874488831
] |
Walks a directory tree.
Like :py:func:`os.walk` but yielding instances of :py:class:`pathlib.Path`
instead of strings.
:param root_path: the root directory to walk
:param top_down: yield each directory before its children if true
|
def walk(
root_path: Union[str, pathlib.Path], top_down: bool = True
) -> Generator[
Tuple[pathlib.Path, Sequence[pathlib.Path], Sequence[pathlib.Path]], None, None
]:
"""
Walks a directory tree.
Like :py:func:`os.walk` but yielding instances of :py:class:`pathlib.Path`
instead of strings.
    :param root_path: the root directory to walk
    :param top_down: yield each directory before its children if true
"""
root_path = pathlib.Path(root_path)
directory_paths, file_paths = [], []
for path in sorted(root_path.iterdir()):
if path.is_dir():
directory_paths.append(path)
else:
file_paths.append(path)
if top_down:
yield root_path, directory_paths, file_paths
for directory_path in directory_paths:
yield from walk(directory_path, top_down=top_down)
if not top_down:
yield root_path, directory_paths, file_paths
|
[
"def _walk(top, topdown=True, onerror=None, followlinks=False):\n \"\"\"Like Python 3.5's implementation of os.walk() -- faster than\n the pre-Python 3.5 version as it uses scandir() internally.\n \"\"\"\n dirs = []\n nondirs = []\n\n # We may not have read permission for top, in which case we can't\n # get a list of the files the directory contains. os.walk\n # always suppressed the exception then, rather than blow up for a\n # minor reason when (say) a thousand readable directories are still\n # left to visit. That logic is copied here.\n try:\n scandir_it = scandir(top)\n except OSError as error:\n if onerror is not None:\n onerror(error)\n return\n\n while True:\n try:\n try:\n entry = next(scandir_it)\n except StopIteration:\n break\n except OSError as error:\n if onerror is not None:\n onerror(error)\n return\n\n try:\n is_dir = entry.is_dir()\n except OSError:\n # If is_dir() raises an OSError, consider that the entry is not\n # a directory, same behaviour than os.path.isdir().\n is_dir = False\n\n if is_dir:\n dirs.append(entry.name)\n else:\n nondirs.append(entry.name)\n\n if not topdown and is_dir:\n # Bottom-up: recurse into sub-directory, but exclude symlinks to\n # directories if followlinks is False\n if followlinks:\n walk_into = True\n else:\n try:\n is_symlink = entry.is_symlink()\n except OSError:\n # If is_symlink() raises an OSError, consider that the\n # entry is not a symbolic link, same behaviour than\n # os.path.islink().\n is_symlink = False\n walk_into = not is_symlink\n\n if walk_into:\n for entry in walk(entry.path, topdown, onerror, followlinks):\n yield entry\n\n # Yield before recursion if going top down\n if topdown:\n yield top, dirs, nondirs\n\n # Recurse into sub-directories\n for name in dirs:\n new_path = join(top, name)\n # Issue #23605: os.path.islink() is used instead of caching\n # entry.is_symlink() result during the loop on os.scandir() because\n # the caller can replace the directory entry during the \"yield\"\n # above.\n if followlinks or not islink(new_path):\n for entry in walk(new_path, topdown, onerror, followlinks):\n yield entry\n else:\n # Yield after recursion if going bottom up\n yield top, dirs, nondirs",
"def walk(self, top, topdown=True, ignore_file_handler=None):\n \"\"\"Directory tree generator.\n\n See `os.walk` for the docs. Differences:\n - no support for symlinks\n - it could raise exceptions, there is no onerror argument\n \"\"\"\n\n def onerror(e):\n raise e\n\n for root, dirs, files in dvc_walk(\n os.path.abspath(top),\n topdown=topdown,\n onerror=onerror,\n ignore_file_handler=ignore_file_handler,\n ):\n yield os.path.normpath(root), dirs, files",
"def walk(self, top, topdown=True, ignore_file_handler=None):\n \"\"\"Directory tree generator.\n\n See `os.walk` for the docs. Differences:\n - no support for symlinks\n - it could raise exceptions, there is no onerror argument\n \"\"\"\n\n tree = self.git_object_by_path(top)\n if tree is None:\n raise IOError(errno.ENOENT, \"No such file\")\n\n for x in self._walk(tree, topdown):\n yield x",
"def walk_tree(self, top=None):\n \"\"\"\n Navigate all the groups in the file starting from top.\n If top is None, the root group is used.\n \"\"\"\n if top is None:\n top = self.rootgrp\n\n values = top.groups.values()\n yield values\n for value in top.groups.values():\n for children in self.walk_tree(value):\n yield children",
"def safe_walk(top, topdown=True, onerror=None, followlinks=True, _seen=None):\n '''\n A clone of the python os.walk function with some checks for recursive\n symlinks. Unlike os.walk this follows symlinks by default.\n '''\n if _seen is None:\n _seen = set()\n\n # We may not have read permission for top, in which case we can't\n # get a list of the files the directory contains. os.path.walk\n # always suppressed the exception then, rather than blow up for a\n # minor reason when (say) a thousand readable directories are still\n # left to visit. That logic is copied here.\n try:\n # Note that listdir and error are globals in this module due\n # to earlier import-*.\n names = os.listdir(top)\n except os.error as err:\n if onerror is not None:\n onerror(err)\n return\n\n if followlinks:\n status = os.stat(top)\n # st_ino is always 0 on some filesystems (FAT, NTFS); ignore them\n if status.st_ino != 0:\n node = (status.st_dev, status.st_ino)\n if node in _seen:\n return\n _seen.add(node)\n\n dirs, nondirs = [], []\n for name in names:\n full_path = os.path.join(top, name)\n if os.path.isdir(full_path):\n dirs.append(name)\n else:\n nondirs.append(name)\n\n if topdown:\n yield top, dirs, nondirs\n for name in dirs:\n new_path = os.path.join(top, name)\n if followlinks or not os.path.islink(new_path):\n for x in safe_walk(new_path, topdown, onerror, followlinks, _seen):\n yield x\n if not topdown:\n yield top, dirs, nondirs",
"def _depth_limited_walk(top, max_depth=None):\n '''\n Walk the directory tree under root up till reaching max_depth.\n With max_depth=None (default), do not limit depth.\n '''\n for root, dirs, files in salt.utils.path.os_walk(top):\n if max_depth is not None:\n rel_depth = root.count(os.path.sep) - top.count(os.path.sep)\n if rel_depth >= max_depth:\n del dirs[:]\n yield (six.text_type(root), list(dirs), list(files))",
"def walk(self, top, topdown=True, onerror=None, followlinks=False):\n \"\"\"Perform a walk operation over the fake filesystem.\n\n Args:\n top: The root directory from which to begin walk.\n topdown: Determines whether to return the tuples with the root as\n the first entry (`True`) or as the last, after all the child\n directory tuples (`False`).\n onerror: If not `None`, function which will be called to handle the\n `os.error` instance provided when `os.listdir()` fails.\n followlinks: If `True`, symbolic links are followed.\n\n Yields:\n (path, directories, nondirectories) for top and each of its\n subdirectories. See the documentation for the builtin os module\n for further details.\n \"\"\"\n return walk(self.filesystem, top, topdown, onerror, followlinks)",
"def walk(self, topdown=True):\n \"\"\"\n Artifact tree generator - analogue of `os.walk`.\n\n :param topdown: if is True or not specified, directories are scanned\n from top-down. If topdown is set to False, directories are scanned\n from bottom-up.\n :rtype: collections.Iterator[\n (str, list[yagocd.resources.artifact.Artifact], list[yagocd.resources.artifact.Artifact])\n ]\n \"\"\"\n return self._manager.walk(top=self._path, topdown=topdown)",
"def scandir_walk(top, skip_dirs=(), on_skip=None):\n \"\"\"\n Just walk the filesystem tree top-down with os.scandir() and don't follow symlinks.\n :param top: path to scan\n :param skip_dirs: List of dir names to skip\n e.g.: \"__pycache__\", \"temp\", \"tmp\"\n :param on_skip: function that will be called if 'skip_dirs' match.\n e.g.:\n def on_skip(entry, pattern):\n log.error(\"Skip pattern %r hit: %s\" % (pattern, entry.path))\n :return: yields os.DirEntry() instances\n \"\"\"\n # We may not have read permission for top, in which case we can't\n # get a list of the files the directory contains. os.walk\n # always suppressed the exception then, rather than blow up for a\n # minor reason when (say) a thousand readable directories are still\n # left to visit. That logic is copied here.\n try:\n scandir_it = Path2(top).scandir()\n except PermissionError as err:\n log.error(\"scandir error: %s\" % err)\n return\n\n for entry in scandir_it:\n if entry.is_dir(follow_symlinks=False):\n if entry.name in skip_dirs:\n on_skip(entry, entry.name)\n else:\n yield from scandir_walk(entry.path, skip_dirs, on_skip)\n else:\n yield entry",
"def walk(self, relpath, topdown=True):\n \"\"\"Walk the file tree rooted at `path`.\n\n Works like os.walk but returned root value is relative path.\n Ignored paths will not be returned.\n \"\"\"\n for root, dirs, files in self._walk_raw(relpath, topdown):\n matched_dirs = self.ignore.match_files([os.path.join(root, \"{}/\".format(d)) for d in dirs])\n matched_files = self.ignore.match_files([os.path.join(root, f) for f in files])\n for matched_dir in matched_dirs:\n dirs.remove(fast_relpath(matched_dir, root).rstrip('/'))\n\n for matched_file in matched_files:\n files.remove(fast_relpath(matched_file, root))\n\n yield root, dirs, files",
"def walk(self, top):\n \"\"\"\n Generate infos for all paths in the tree rooted at ``top`` (included).\n\n The ``top`` parameter can be either an HDFS path string or a\n dictionary of properties as returned by :meth:`get_path_info`.\n\n :type top: str, dict\n :param top: an HDFS path or path info dict\n :rtype: iterator\n :return: path infos of files and directories in the tree rooted at\n ``top``\n :raises: :exc:`~exceptions.IOError`; :exc:`~exceptions.ValueError`\n if ``top`` is empty\n \"\"\"\n if not top:\n raise ValueError(\"Empty path\")\n if isinstance(top, basestring):\n top = self.get_path_info(top)\n yield top\n if top['kind'] == 'directory':\n for info in self.list_directory(top['name']):\n for item in self.walk(info):\n yield item",
"def _walk_directory(root_directory):\n \"\"\"\n Generates the paths of all files that are ancestors\n of `root_directory`.\n \"\"\"\n\n paths = [os.path.join(root, name)\n for root, dirs, files in os.walk(root_directory) # noqa\n for name in files]\n paths.sort()\n return paths"
] |
[
0.7666237354278564,
0.760486900806427,
0.7558243870735168,
0.7528534531593323,
0.7476502060890198,
0.7472764849662781,
0.7448787689208984,
0.7446994781494141,
0.7380245923995972,
0.7364243268966675,
0.734965443611145,
0.733047604560852
] |
Writes ``contents`` to ``path``.
Checks if ``path`` already exists and only writes out new contents if the
old contents do not match.
Creates any intermediate missing directories.
:param contents: the file contents to write
:param path: the path to write to
:param verbose: whether to print output
:param logger_func: optional callable used for output instead of ``print``
|
def write(
contents: str,
path: Union[str, pathlib.Path],
verbose: bool = False,
logger_func=None,
) -> bool:
"""
Writes ``contents`` to ``path``.
    Checks if ``path`` already exists and only writes out new contents if the
    old contents do not match.
Creates any intermediate missing directories.
:param contents: the file contents to write
:param path: the path to write to
    :param verbose: whether to print output
    :param logger_func: optional callable used for output instead of ``print``
"""
print_func = logger_func or print
path = pathlib.Path(path)
if path.exists():
with path.open("r") as file_pointer:
old_contents = file_pointer.read()
if old_contents == contents:
if verbose:
print_func("preserved {}".format(path))
return False
else:
with path.open("w") as file_pointer:
file_pointer.write(contents)
if verbose:
print_func("rewrote {}".format(path))
return True
    else:
if not path.parent.exists():
path.parent.mkdir(parents=True)
with path.open("w") as file_pointer:
file_pointer.write(contents)
if verbose:
print_func("wrote {}".format(path))
return True
|
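A minimal usage sketch for ``write`` above; the path and contents are invented for illustration::

    # first run: parent directories are created and the file is written
    changed = write("key: value\n", "out/config/settings.yml", verbose=True)
    # an identical second run reports "preserved ..." and returns False
    changed = write("key: value\n", "out/config/settings.yml", verbose=True)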
[
"def write_file(self, path, contents):\n\t\t\"\"\"\n\t\tWrite a file of any type to the destination path. Useful for files like\n\t\trobots.txt, manifest.json, and so on.\n\n\t\tArgs:\n\t\t path (str): The name of the file to write to.\n\t\t contents (str or bytes): The contents to write.\n\t\t\"\"\"\n\t\tpath = self._get_dist_path(path)\n\t\tif not os.path.isdir(os.path.dirname(path)):\n\t\t\tos.makedirs(os.path.dirname(path))\n\t\tif isinstance(contents, bytes):\n\t\t\tmode = 'wb+'\n\t\telse:\n\t\t\tmode = 'w'\n\t\twith open(path, mode) as file:\n\t\t\tfile.write(contents)",
"def write_content(self, content, destination):\n \"\"\"\n Write given content to destination path.\n\n It will create needed directory structure first if it contain some\n directories that does not allready exists.\n\n Args:\n content (str): Content to write to target file.\n destination (str): Destination path for target file.\n\n Returns:\n str: Path where target file has been written.\n \"\"\"\n directory = os.path.dirname(destination)\n\n if directory and not os.path.exists(directory):\n os.makedirs(directory)\n\n with io.open(destination, 'w', encoding='utf-8') as f:\n f.write(content)\n\n return destination",
"def write_file(path, contents):\n\t\"\"\"Write contents to a local file.\"\"\"\n\tos.makedirs(os.path.dirname(path), exist_ok=True)\n\twith open(path, \"w\") as file:\n\t\tfile.write(contents)",
"function writeFile(pathToFile, content, verbose) {\n const directory = path.dirname(pathToFile);\n\n // Create directory if it doesn't exist\n if (!fs.existsSync(directory)) {\n fs.mkdirSync(directory);\n }\n\n // Try to write file to disk to the given location\n fs.writeFile(pathToFile, content, { flag: 'w' }, (error) => {\n if (error) {\n throw new Error(error);\n }\n\n // Log success message to the console if in verbose mode only\n if (verbose) {\n const successMessage = `\\nVersion file written to ${chalk.green(pathToFile)}`;\n\n console.log(successMessage);\n }\n });\n}",
"def make_file(path, contents='', overwrite=False):\n \"\"\"\n Make a file at `path` assuming that the directory it resides in already\n exists. The file is saved with contents `contents`\n \"\"\"\n if overwrite or not os.path.exists(path):\n with open(path, 'w') as fh:\n fh.write(contents)\n return True\n\n return False",
"def write(path, contents, fatal=True, logger=None):\n \"\"\"\n :param str|None path: Path to file\n :param str|None contents: Contents to write\n :param bool|None fatal: Abort execution on failure if True\n :param callable|None logger: Logger to use\n :return int: 1 if effectively done, 0 if no-op, -1 on failure\n \"\"\"\n if not path:\n return 0\n\n if is_dryrun():\n action = \"write %s bytes to\" % len(contents) if contents else \"touch\"\n LOG.debug(\"Would %s %s\", action, short(path))\n return 1\n\n ensure_folder(path, fatal=fatal, logger=logger)\n if logger and contents:\n logger(\"Writing %s bytes to %s\", len(contents), short(path))\n\n try:\n with io.open(path, \"wt\") as fh:\n if contents:\n fh.write(decode(contents))\n else:\n os.utime(path, None)\n return 1\n\n except Exception as e:\n return abort(\"Can't write to %s: %s\", short(path), e, fatal=(fatal, -1))",
"def write_to_file(path, contents, file_type='text'):\n \"\"\"Write ``contents`` to ``path`` with optional formatting.\n\n Small helper function to write ``contents`` to ``file`` with optional formatting.\n\n Args:\n path (str): the path to write to\n contents (str, object, or bytes): the contents to write to the file\n file_type (str, optional): the type of file. Currently accepts\n ``text`` or ``binary`` (contents are unchanged) or ``json`` (contents\n are formatted). Defaults to ``text``.\n\n Raises:\n ScriptWorkerException: with an unknown ``file_type``\n TypeError: if ``file_type`` is ``json`` and ``contents`` isn't JSON serializable\n\n \"\"\"\n FILE_TYPES = ('json', 'text', 'binary')\n if file_type not in FILE_TYPES:\n raise ScriptWorkerException(\"Unknown file_type {} not in {}!\".format(file_type, FILE_TYPES))\n if file_type == 'json':\n contents = format_json(contents)\n if file_type == 'binary':\n with open(path, 'wb') as fh:\n fh.write(contents)\n else:\n with open(path, 'w') as fh:\n print(contents, file=fh, end=\"\")",
"def write_contents_to_file(path, contents=None, link_to=None,\n content_mode='text', root=None, conflicts='fail'):\n \"\"\"\n Uses provided filename patterns to write contents to a new path, given\n a corresponding entity map.\n\n Args:\n path (str): Destination path of the desired contents.\n contents (str): Raw text or binary encoded string of contents to write\n to the new path.\n link_to (str): Optional path with which to create a symbolic link to.\n Used as an alternative to and takes priority over the contents\n argument.\n content_mode (str): Either 'text' or 'binary' to indicate the writing\n mode for the new file. Only relevant if contents is provided.\n root (str): Optional root directory that all patterns are relative\n to. Defaults to current working directory.\n conflicts (str): One of 'fail', 'skip', 'overwrite', or 'append'\n that defines the desired action when the output path already\n exists. 'fail' raises an exception; 'skip' does nothing;\n 'overwrite' overwrites the existing file; 'append' adds a suffix\n to each file copy, starting with 1. Default is 'fail'.\n \"\"\"\n\n if root is None and not isabs(path):\n root = os.getcwd()\n\n if root:\n path = join(root, path)\n\n if exists(path) or islink(path):\n if conflicts == 'fail':\n msg = 'A file at path {} already exists.'\n raise ValueError(msg.format(path))\n elif conflicts == 'skip':\n msg = 'A file at path {} already exists, skipping writing file.'\n logging.warn(msg.format(path))\n return\n elif conflicts == 'overwrite':\n if isdir(path):\n logging.warn('New path is a directory, not going to '\n 'overwrite it, skipping instead.')\n return\n os.remove(path)\n elif conflicts == 'append':\n i = 1\n while i < sys.maxsize:\n path_splits = splitext(path)\n path_splits[0] = path_splits[0] + '_%d' % i\n appended_filename = os.extsep.join(path_splits)\n if not exists(appended_filename) and \\\n not islink(appended_filename):\n path = appended_filename\n break\n i += 1\n else:\n raise ValueError('Did not provide a valid conflicts parameter')\n\n if not exists(dirname(path)):\n os.makedirs(dirname(path))\n\n if link_to:\n os.symlink(link_to, path)\n elif contents:\n mode = 'wb' if content_mode == 'binary' else 'w'\n with open(path, mode) as f:\n f.write(contents)\n else:\n raise ValueError('One of contents or link_to must be provided.')",
"def _write_to_zip(self, path, contents):\n \"\"\" _write_to_zip: Write file to zip\n Args:\n path: (str) where in zip to write file\n contents: (str) contents of file to write\n Returns: None\n \"\"\"\n if isinstance(path, list):\n path = os.path.sep.join(path)\n self.zf.writestr(path, contents)",
"def write_file(path, content, owner='root', group='root', perms=0o444):\n \"\"\"Create or overwrite a file with the contents of a byte string.\"\"\"\n uid = pwd.getpwnam(owner).pw_uid\n gid = grp.getgrnam(group).gr_gid\n # lets see if we can grab the file and compare the context, to avoid doing\n # a write.\n existing_content = None\n existing_uid, existing_gid, existing_perms = None, None, None\n try:\n with open(path, 'rb') as target:\n existing_content = target.read()\n stat = os.stat(path)\n existing_uid, existing_gid, existing_perms = (\n stat.st_uid, stat.st_gid, stat.st_mode\n )\n except Exception:\n pass\n if content != existing_content:\n log(\"Writing file {} {}:{} {:o}\".format(path, owner, group, perms),\n level=DEBUG)\n with open(path, 'wb') as target:\n os.fchown(target.fileno(), uid, gid)\n os.fchmod(target.fileno(), perms)\n if six.PY3 and isinstance(content, six.string_types):\n content = content.encode('UTF-8')\n target.write(content)\n return\n # the contents were the same, but we might still need to change the\n # ownership or permissions.\n if existing_uid != uid:\n log(\"Changing uid on already existing content: {} -> {}\"\n .format(existing_uid, uid), level=DEBUG)\n os.chown(path, uid, -1)\n if existing_gid != gid:\n log(\"Changing gid on already existing content: {} -> {}\"\n .format(existing_gid, gid), level=DEBUG)\n os.chown(path, -1, gid)\n if existing_perms != perms:\n log(\"Changing permissions on existing content: {} -> {}\"\n .format(existing_perms, perms), level=DEBUG)\n os.chmod(path, perms)",
"def write(path, *args, **kwargs):\n '''\n .. versionadded:: 2014.7.0\n\n Write text to a file, overwriting any existing contents.\n\n path\n path to file\n\n `*args`\n strings to write to the file\n\n CLI Example:\n\n .. code-block:: bash\n\n salt '*' file.write /etc/motd \\\\\n \"With all thine offerings thou shalt offer salt.\"\n\n .. admonition:: Attention\n\n If you need to pass a string to append and that string contains\n an equal sign, you **must** include the argument name, args.\n For example:\n\n .. code-block:: bash\n\n salt '*' file.write /etc/motd args='cheese=spam'\n\n salt '*' file.write /etc/motd args=\"['cheese=spam','spam=cheese']\"\n\n '''\n path = os.path.expanduser(path)\n\n if 'args' in kwargs:\n if isinstance(kwargs['args'], list):\n args = kwargs['args']\n else:\n args = [kwargs['args']]\n\n contents = []\n for line in args:\n contents.append('{0}\\n'.format(line))\n with salt.utils.files.fopen(path, \"w\") as ofile:\n ofile.write(salt.utils.stringutils.to_str(''.join(contents)))\n return 'Wrote {0} lines to \"{1}\"'.format(len(contents), path)",
"def _file_write(path, content):\n '''\n Write content to a file\n '''\n with salt.utils.files.fopen(path, 'w+') as fp_:\n fp_.write(salt.utils.stringutils.to_str(content))\n fp_.close()"
] |
[
0.7112444639205933,
0.6992592811584473,
0.6982851624488831,
0.6905460953712463,
0.6884069442749023,
0.6825169324874878,
0.6800003051757812,
0.6771378517150879,
0.671228289604187,
0.6689144968986511,
0.6681013107299805,
0.6621887683868408
] |
Pretty object reference using ``module.path:qual.name`` format
|
def pretty_ref(obj: Any) -> str:
"""Pretty object reference using ``module.path:qual.name`` format"""
try:
return obj.__module__ + ':' + obj.__qualname__
except AttributeError:
return pretty_ref(type(obj)) + '(...)'
|
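Illustrative calls for ``pretty_ref`` above, using standard-library objects (``Any`` from ``typing`` is assumed to be imported by the snippet)::

    >>> import collections
    >>> pretty_ref(collections.OrderedDict)
    'collections:OrderedDict'
    >>> pretty_ref(collections.OrderedDict())
    'collections:OrderedDict(...)'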
[
"def qualified_name(self):\n '''return the fully qualified name (`<module>.<name>`)'''\n if self.module == self:\n return self.module.name\n else:\n if \".\" not in self.name:\n return '{0}.{1}'.format(self.module.name, self.name)\n else:\n # We have a fully qualified reference, just return it\n return self.name",
"def qualified_name(obj):\n '''Returns the fully-qualified name of the given object'''\n if not hasattr(obj, '__module__'):\n obj = obj.__class__\n module = obj.__module__\n if module is None or module == str.__class__.__module__:\n return obj.__qualname__\n return '{}.{}'.format(module, obj.__qualname__)",
"def qual(obj):\n \"\"\"\n Return fully qualified name of a class.\n \"\"\"\n return u'{}.{}'.format(obj.__class__.__module__, obj.__class__.__name__)",
"def qualified_name(obj):\n \"\"\"Return the qualified name (e.g. package.module.Type) for the given object.\"\"\"\n try:\n module = obj.__module__\n qualname = obj.__qualname__\n except AttributeError:\n type_ = type(obj)\n module = type_.__module__\n qualname = type_.__qualname__\n\n return qualname if module in ('typing', 'builtins') else '{}.{}'.format(module, qualname)",
"def qualified_name(obj) -> str:\n \"\"\"\n Return the qualified name (e.g. package.module.Type) for the given object.\n\n If ``obj`` is not a class, the returned name will match its type instead.\n\n \"\"\"\n if not isclass(obj):\n obj = type(obj)\n\n if obj.__module__ == 'builtins':\n return obj.__name__\n else:\n return '{}.{}'.format(obj.__module__, obj.__qualname__)",
"def qualname(thing):\n \"\"\"Return the dot name for a given thing.\n\n >>> import everett.manager\n >>> qualname(str)\n 'str'\n >>> qualname(everett.manager.parse_class)\n 'everett.manager.parse_class'\n >>> qualname(everett.manager)\n 'everett.manager'\n\n \"\"\"\n parts = []\n\n # Add the module, unless it's a builtin\n mod = inspect.getmodule(thing)\n if mod and mod.__name__ not in ('__main__', '__builtin__', 'builtins'):\n parts.append(mod.__name__)\n\n # If there's a __qualname__, use that\n if hasattr(thing, '__qualname__'):\n parts.append(thing.__qualname__)\n return '.'.join(parts)\n\n # If it's a module\n if inspect.ismodule(thing):\n return '.'.join(parts)\n\n # If it's a class\n if inspect.isclass(thing):\n parts.append(thing.__name__)\n return '.'.join(parts)\n\n # If it's a function\n if isinstance(thing, (types.FunctionType, types.MethodType)):\n # If it's a method or function\n if inspect.ismethod(thing):\n if thing.im_class is type:\n # This is a class method\n parts.append(thing.im_self.__name__)\n else:\n # This is an bound/instance method\n parts.append(thing.im_class.__name__)\n parts.append(thing.__name__)\n\n elif inspect.isfunction(thing):\n parts.append(thing.__name__)\n\n return '.'.join(parts)\n\n # It's an instance, so ... let's call repr on it\n return repr(thing)",
"def qualified_class_name(o):\n \"\"\"Full name of an object, including the module\"\"\"\n module = o.__class__.__module__\n if module is None or module == str.__class__.__module__:\n return o.__class__.__name__\n return module + '.' + o.__class__.__name__",
"def filter_pyfqn(cls, value, relative_to=0):\n \"\"\"\n Returns Python form of fully qualified name.\n\n Args:\n relative_to: If greater 0, the returned path is relative to the first n directories.\n \"\"\"\n\n def collect_packages(element, packages):\n parent = element.eContainer()\n if parent:\n collect_packages(parent, packages)\n packages.append(element.name)\n\n packages = []\n collect_packages(value, packages)\n\n if relative_to < 0 or relative_to > len(packages):\n raise ValueError('relative_to not in range of number of packages')\n\n fqn = '.'.join(packages[relative_to:])\n\n if relative_to:\n fqn = '.' + fqn\n\n return cls.module_path_map.get(fqn, fqn)",
"def path_to_reference(path):\n \"\"\"Convert an object path reference to a reference.\"\"\"\n\n # By default JSON decodes strings as unicode. The Python __import__ does\n # not like that choice. So we'll just cast all function paths to a string.\n # NOTE: that there is no corresponding unit test for the classmethod\n # version of this problem. It only impacts importing modules.\n path = str(path)\n\n if '.' not in path:\n try:\n return globals()[\"__builtins__\"][path]\n except KeyError:\n try:\n return getattr(globals()[\"__builtins__\"], path)\n except AttributeError:\n pass\n\n try:\n return globals()[path]\n except KeyError:\n pass\n\n raise errors.BadObjectPathError(\n 'Unable to find function \"%s\".' % (path,))\n\n module_path, function_name = path.rsplit('.', 1)\n\n try:\n module = __import__(name=module_path,\n fromlist=[function_name])\n except ImportError:\n module_path, class_name = module_path.rsplit('.', 1)\n\n module = __import__(name=module_path, fromlist=[class_name])\n module = getattr(module, class_name)\n\n try:\n return getattr(module, function_name)\n except AttributeError:\n raise errors.BadObjectPathError(\n 'Unable to find function \"%s\".' % (path,))",
"def qualified_name(cls):\n \"\"\"Full name of a class, including the module. Like qualified_class_name, but when you already have a class \"\"\"\n module = cls.__module__\n if module is None or module == str.__class__.__module__:\n return cls.__name__\n return module + '.' + cls.__name__",
"def get_qualified_name(_object):\n \"\"\"Return the Fully Qualified Name from an instance or class.\"\"\"\n module = _object.__module__\n if hasattr(_object, '__name__'):\n _class = _object.__name__\n\n else:\n _class = _object.__class__.__name__\n\n return module + '.' + _class",
"def modurl(qualname):\n \"\"\"Get the full GitHub URL for some object’s qualname.\"\"\"\n obj, module = get_obj_module(qualname)\n github_url = github_url1\n try:\n path = PurePosixPath(Path(module.__file__).resolve().relative_to(project_dir))\n except ValueError:\n # trying to document something from another package\n github_url = github_url2\n path = '/'.join(module.__file__.split('/')[-2:])\n start, end = get_linenos(obj)\n fragment = '#L{}-L{}'.format(start, end) if start and end else ''\n return '{}/{}{}'.format(github_url, path, fragment)"
] |
[
0.7537909150123596,
0.7496721148490906,
0.7459216117858887,
0.7329827547073364,
0.72868412733078,
0.7268847227096558,
0.7227492332458496,
0.719032883644104,
0.7084441184997559,
0.7068862318992615,
0.7022971510887146,
0.7020387649536133
] |
Display repo github path
|
def remote(ctx):
"""Display repo github path
"""
with command():
m = RepoManager(ctx.obj['agile'])
click.echo(m.github_repo().repo_path)
|
[
"def repository(name):\n \"\"\"Display selected repository.\"\"\"\n user_id = current_user.id\n github = GitHubAPI(user_id=user_id)\n token = github.session_token\n\n if token:\n repos = github.account.extra_data.get('repos', [])\n repo = next((repo for repo_id, repo in repos.items()\n if repo.get('full_name') == name), {})\n if not repo:\n abort(403)\n\n try:\n # NOTE: Here we do not check for repository ownership, since it\n # might have changed even though the user might have made releases\n # in the past.\n repo_instance = Repository.get(user_id=user_id,\n github_id=repo['id'],\n check_owner=False)\n except RepositoryAccessError:\n abort(403)\n except NoResultFound:\n repo_instance = Repository(name=repo['full_name'],\n github_id=repo['id'])\n\n releases = [\n current_github.release_api_class(r) for r in (\n repo_instance.releases.order_by(db.desc(Release.created)).all()\n if repo_instance.id else []\n )\n ]\n return render_template(\n current_app.config['GITHUB_TEMPLATE_VIEW'],\n repo=repo_instance,\n releases=releases,\n serializer=current_github.record_serializer,\n )\n\n abort(403)",
"def index():\n \"\"\"Display list of the user's repositories.\"\"\"\n github = GitHubAPI(user_id=current_user.id)\n token = github.session_token\n ctx = dict(connected=False)\n\n if token:\n # The user is authenticated and the token we have is still valid.\n if github.account.extra_data.get('login') is None:\n github.init_account()\n db.session.commit()\n\n # Sync if needed\n if request.method == 'POST' or github.check_sync():\n # When we're in an XHR request, we want to synchronously sync hooks\n github.sync(async_hooks=(not request.is_xhr))\n db.session.commit()\n\n # Generate the repositories view object\n extra_data = github.account.extra_data\n repos = extra_data['repos']\n if repos:\n # 'Enhance' our repos dict, from our database model\n db_repos = Repository.query.filter(\n Repository.github_id.in_([int(k) for k in repos.keys()]),\n ).all()\n for repo in db_repos:\n repos[str(repo.github_id)]['instance'] = repo\n repos[str(repo.github_id)]['latest'] = GitHubRelease(\n repo.latest_release())\n\n last_sync = humanize.naturaltime(\n (utcnow() - parse_timestamp(extra_data['last_sync'])))\n\n ctx.update({\n 'connected': True,\n 'repos': sorted(repos.items(), key=lambda x: x[1]['full_name']),\n 'last_sync': last_sync,\n })\n\n return render_template(current_app.config['GITHUB_TEMPLATE_INDEX'], **ctx)",
"function githubRepo (options) {\n try {\n const url = options.data.root.package.repository.url\n const match = url.match(/.*?(:\\/\\/|@)github\\.com[/:](.*?)(#.*?)?$/)\n if (match) {\n return match[2].replace(/\\.git$/, '')\n } else {\n return null\n }\n } catch (e) {\n // No repositor-url exists\n return null\n }\n}",
"def _get_repo():\n \"\"\"Identify the path to the repository origin.\"\"\"\n command = ['git', 'rev-parse', '--show-toplevel']\n if six.PY2:\n try:\n return check_output(command).decode('utf-8').strip() # nosec\n except CalledProcessError:\n return ''\n else:\n return (run(command, stdout=PIPE, stderr=PIPE)\n .stdout.decode('utf-8').strip())",
"def view(self, repo):\n \"\"\"\n View repository information\n \"\"\"\n status = \"{0}disabled{1}\".format(self.meta.color[\"RED\"],\n self.meta.color[\"ENDC\"])\n self.form[\"Status:\"] = status\n self.form[\"Default:\"] = \"no\"\n if repo in self.meta.default_repositories:\n self.form[\"Default:\"] = \"yes\"\n if (repo in self.meta.repositories and\n os.path.isfile(self.meta.lib_path + \"{0}_repo/PACKAGES.\"\n \"TXT\".format(repo))):\n status = \"{0}enabled{1}\".format(self.meta.color[\"GREEN\"],\n self.meta.color[\"ENDC\"])\n if repo != \"sbo\":\n data = self.repository_data(repo)\n size = units(data[1], data[2])\n self.form[\"Repo id:\"] = repo\n self.form[\"Repo url:\"] = self.all_repos[repo]\n self.form[\"Total compressed packages:\"] = \"{0} {1}\".format(\n str(size[1][0]), str(size[0][0]))\n self.form[\"Total uncompressed packages:\"] = \"{0} {1}\".format(\n str(size[1][1]), str(size[0][1]))\n self.form[\"Number of packages:\"] = data[0]\n self.form[\"Status:\"] = status\n self.form[\"Last updated:\"] = data[3]\n elif (repo == \"sbo\" and os.path.isfile(self.meta.lib_path + \"{0}_repo/\"\n \"SLACKBUILDS.TXT\".format(repo))):\n status = \"{0}enabled{1}\".format(self.meta.color[\"GREEN\"],\n self.meta.color[\"ENDC\"])\n sum_sbo_pkgs = 0\n for line in (Utils().read_file(\n self.meta.lib_path + \"sbo_repo/SLACKBUILDS.\"\n \"TXT\").splitlines()):\n if line.startswith(\"SLACKBUILD NAME: \"):\n sum_sbo_pkgs += 1\n changelog_txt = Utils().read_file(\n self.meta.log_path + \"sbo/ChangeLog.txt\")\n last_upd = changelog_txt.split(\"\\n\", 1)[0]\n self.form[\"Repo id:\"] = repo\n self.form[\"Repo url:\"] = self.all_repos[repo]\n self.form[\"Total compressed packages:\"] = \"\"\n self.form[\"Total uncompressed packages:\"] = \"\"\n self.form[\"Number of packages:\"] = sum_sbo_pkgs\n self.form[\"Status:\"] = status\n self.form[\"Last updated:\"] = last_upd\n print(\"\")\n for key, value in sorted(self.form.iteritems()):\n print self.meta.color[\"GREY\"] + key + self.meta.color[\"ENDC\"], value\n print(\"\")\n raise SystemExit()",
"public void show(URL gitReportsBaseURL, String userName, String repoName) {\n String windowTitle = null;\n show(windowTitle, gitReportsBaseURL, userName, repoName);\n }",
"private function display_ghu_repos( $git ) {\n\t\t$lock_title = esc_html__( 'This is a private repository.', 'github-updater' );\n\t\t$broken_title = esc_html__( 'This repository has not connected to the API or was unable to connect.', 'github-updater' );\n\t\t$dot_org_title = esc_html__( 'This repository is hosted on WordPress.org.', 'github-updater' );\n\n\t\t$plugins = Singleton::get_instance( 'Plugin', $this )->get_plugin_configs();\n\t\t$themes = Singleton::get_instance( 'Theme', $this )->get_theme_configs();\n\t\t$repos = array_merge( $plugins, $themes );\n\t\t$bbserver = [ 'bitbucket', 'bbserver' ];\n\n\t\t$type_repos = array_filter(\n\t\t\t$repos,\n\t\t\tfunction ( $e ) use ( $git, $bbserver ) {\n\t\t\t\tif ( ! empty( $e->enterprise ) && in_array( $git, $bbserver, true ) ) {\n\t\t\t\t\treturn false !== stripos( $e->git, 'bitbucket' ) && 'bbserver' === $git;\n\t\t\t\t}\n\n\t\t\t\treturn false !== stripos( $e->git, $git );\n\t\t\t}\n\t\t);\n\n\t\t$display_data = array_map(\n\t\t\tfunction ( $e ) {\n\t\t\t\treturn [\n\t\t\t\t\t'type' => $e->type,\n\t\t\t\t\t'slug' => $e->slug,\n\t\t\t\t\t'file' => isset( $e->file ) ? $e->file : $e->slug,\n\t\t\t\t\t'branch' => $e->branch,\n\t\t\t\t\t'name' => $e->name,\n\t\t\t\t\t'private' => isset( $e->is_private ) ? $e->is_private : false,\n\t\t\t\t\t'broken' => ! isset( $e->remote_version ) || '0.0.0' === $e->remote_version,\n\t\t\t\t\t'dot_org' => isset( $e->dot_org ) ? $e->dot_org : false,\n\t\t\t\t];\n\t\t\t},\n\t\t\t$type_repos\n\t\t);\n\n\t\t$lock = ' <span title=\"' . $lock_title . '\" class=\"dashicons dashicons-lock\"></span>';\n\t\t$broken = ' <span title=\"' . $broken_title . '\" style=\"color:#f00;\" class=\"dashicons dashicons-warning\"></span>';\n\t\t$dot_org = ' <span title=\"' . $dot_org_title . '\" class=\"dashicons dashicons-wordpress\"></span></span>';\n\t\tprintf( '<h2>' . esc_html__( 'Installed Plugins and Themes', 'github-updater' ) . '</h2>' );\n\t\tforeach ( $display_data as $data ) {\n\t\t\t$dashicon = false !== strpos( $data['type'], 'theme' )\n\t\t\t\t? '<span class=\"dashicons dashicons-admin-appearance\"></span> '\n\t\t\t\t: '<span class=\"dashicons dashicons-admin-plugins\"></span> ';\n\t\t\t$is_private = $data['private'] ? $lock : null;\n\t\t\t$is_broken = $data['broken'] ? $broken : null;\n\t\t\t$override = $this->override_dot_org( $data['type'], $data );\n\t\t\t$is_dot_org = $data['dot_org'] && ! $override ? $dot_org : null;\n\t\t\tprintf( '<p>' . $dashicon . $data['name'] . $is_private . $is_dot_org . $is_broken . '</p>' );\n\t\t}\n\t}",
"def do_show(self, repo):\n '''\n List repo attributes\n '''\n self.abort_on_nonexisting_effective_repo(repo, 'show')\n\n repo = self.network.get_repo(repo)\n repo.print_attributes()",
"async function repoUrl(execaOpts) {\n try {\n return await execa.stdout('git', ['config', '--get', 'remote.origin.url'], execaOpts);\n } catch (error) {\n debug(error);\n }\n}",
"public void show(String windowTitle, String userName, String repoName, Throwable e) {\n show(windowTitle, defaultGitReportsURL, userName, repoName, e);\n }",
"def repo_root\n return git_directory_path if inside_bare_repo?\n \n if inside_git_directory?\n File.expand_path('..', git_directory_path)\n elsif inside_work_tree?\n execute(git_cmd('rev-parse', '--show-toplevel'))\n else\n raise 'not in git directory or work tree!?'\n end\n end",
"public void show(String windowTitle, String userName, String repoName) {\n show(windowTitle, defaultGitReportsURL, userName, repoName);\n }"
] |
[
0.7642771601676941,
0.7277076244354248,
0.7231431603431702,
0.7152695655822754,
0.7143361568450928,
0.7138631939888,
0.7125160694122314,
0.7093775868415833,
0.7087344527244568,
0.7085931301116943,
0.7023402452468872,
0.701813280582428
] |
Get graph-order tuple for node.
::
>>> from uqbar.containers import UniqueTreeContainer, UniqueTreeNode
>>> root_container = UniqueTreeContainer(name="root")
>>> outer_container = UniqueTreeContainer(name="outer")
>>> inner_container = UniqueTreeContainer(name="inner")
>>> node_a = UniqueTreeNode(name="a")
>>> node_b = UniqueTreeNode(name="b")
>>> node_c = UniqueTreeNode(name="c")
>>> node_d = UniqueTreeNode(name="d")
>>> root_container.extend([node_a, outer_container])
>>> outer_container.extend([inner_container, node_d])
>>> inner_container.extend([node_b, node_c])
::
>>> for node in root_container.depth_first():
... print(node.name, node.graph_order)
...
a (0,)
outer (1,)
inner (1, 0)
b (1, 0, 0)
c (1, 0, 1)
d (1, 1)
|
def graph_order(self):
"""
Get graph-order tuple for node.
::
>>> from uqbar.containers import UniqueTreeContainer, UniqueTreeNode
>>> root_container = UniqueTreeContainer(name="root")
>>> outer_container = UniqueTreeContainer(name="outer")
>>> inner_container = UniqueTreeContainer(name="inner")
>>> node_a = UniqueTreeNode(name="a")
>>> node_b = UniqueTreeNode(name="b")
>>> node_c = UniqueTreeNode(name="c")
>>> node_d = UniqueTreeNode(name="d")
>>> root_container.extend([node_a, outer_container])
>>> outer_container.extend([inner_container, node_d])
>>> inner_container.extend([node_b, node_c])
::
>>> for node in root_container.depth_first():
... print(node.name, node.graph_order)
...
a (0,)
outer (1,)
inner (1, 0)
b (1, 0, 0)
c (1, 0, 1)
d (1, 1)
"""
parentage = tuple(reversed(self.parentage))
graph_order = []
for i in range(len(parentage) - 1):
parent, child = parentage[i : i + 2]
graph_order.append(parent.index(child))
return tuple(graph_order)
|
[
"def postorder(self):\n \"\"\"\n Returns a valid postorder of the **node objects** of the ``DictGraph`` \n *if* the topology is a directed acyclic graph. This postorder is \n semi-random, because the order of elements in a dictionary is \n semi-random and so are the starting nodes of the depth-first search \n traversal, which determines the postorder, consequently some postorders\n will be discovered more frequently.\n \n This postorder enforces some determinism on particular ties:\n \n - toplogically equivalent branches come first are sorted by length\n (shorter branches come first).\n - if the topological Nodes corresponding to the node objects have\n a ``\"branch\"`` attribute it will be used to sort the graph from \n left to right.\n \n However the final postorder is still *not* deterministic.\n \n \"\"\"\n nodes_random = self.nodes()\n # for debugging we could make it more random;)\n # from random import shuffle\n # shuffle(nodes_random)\n # 1. sort branches\n nodes_by_branch = sorted(nodes_random, cmp=self.cmp_branch)\n # 1. topological sort\n nodes_topological = []\n for node in nodes_by_branch:\n self.dfs(node, nodes_topological)\n self.clear_nodes()\n # 2. earthworm sort\n nodes_consecutive = []\n for node in nodes_topological:\n Node = self[node]\n outgoing_nodes = Node.nodes()\n if outgoing_nodes:\n last_index = max([nodes_consecutive.index(on) for on in \\\n outgoing_nodes])\n nodes_consecutive.insert(last_index + 1, node)\n else:\n nodes_consecutive.append(node)\n return nodes_consecutive",
"def traverse_levelorder(self, leaves=True, internal=True):\n '''Perform a levelorder traversal of the ``Node`` objects in this ``Tree``'''\n for node in self.root.traverse_levelorder(leaves=leaves, internal=internal):\n yield node",
"def get_topological_order(self, cfg_node):\n \"\"\"\n Get the topological order of a CFG Node.\n\n :param cfg_node: A CFGNode instance.\n :return: An integer representing its order, or None if the CFGNode does not exist in the graph.\n \"\"\"\n\n if not self._quasi_topological_order:\n self._quasi_topological_sort()\n\n return self._quasi_topological_order.get(cfg_node, None)",
"def topological(nodes):\n \"\"\"Return nodes in a topological order.\"\"\"\n order, enter, state = deque(), set(nodes), {}\n\n def dfs(node):\n \"\"\"Visit nodes in depth-first order.\"\"\"\n state[node] = GRAY\n for parent in nodes.get(node, ()):\n color = state.get(parent, None)\n if color == GRAY:\n raise ValueError('cycle')\n if color == BLACK:\n continue\n enter.discard(parent)\n dfs(parent)\n order.appendleft(node)\n state[node] = BLACK\n\n while enter:\n dfs(enter.pop())\n\n return order",
"def in_order_traverse(self):\n \"\"\"\n In-order traversal of the tree\n \"\"\"\n result = []\n\n if not self.node:\n return result\n\n result.extend(self.node.left.in_order_traverse())\n result.append(self.node.key)\n result.extend(self.node.right.in_order_traverse())\n return result",
"def ipreorder(self):\n '''Depth-first pre-order iteration of tree nodes'''\n children = deque((self, ))\n while children:\n cur_node = children.pop()\n children.extend(reversed(cur_node.children))\n yield cur_node",
"def depth_first_search(graph, root_node=None):\n \"\"\"Searches through the tree in a breadth-first fashion.\n If root_node is None, an arbitrary node will be used as the root.\n If root_node is not None, it will be used as the root for the search tree.\n Returns a list of nodes, in the order that they were reached.\n \"\"\"\n ordering, parent_lookup, children_lookup = depth_first_search_with_parent_data(graph, root_node)\n return ordering",
"def traverse_levelorder(self, leaves=True, internal=True):\n '''Perform a levelorder traversal starting at this ``Node`` object\n\n Args:\n ``leaves`` (``bool``): ``True`` to include leaves, otherwise ``False``\n\n ``internal`` (``bool``): ``True`` to include internal nodes, otherwise ``False``\n '''\n q = deque(); q.append(self)\n while len(q) != 0:\n n = q.popleft()\n if (leaves and n.is_leaf()) or (internal and not n.is_leaf()):\n yield n\n q.extend(n.children)",
"def level_order(tree, include_all=False):\n \"\"\" Returns an iterator over the tree in level-order\n\n If include_all is set to True, empty parts of the tree are filled\n with dummy entries and the iterator becomes infinite. \"\"\"\n\n q = deque()\n q.append(tree)\n while q:\n node = q.popleft()\n yield node\n\n if include_all or node.left:\n q.append(node.left or node.__class__())\n\n if include_all or node.right:\n q.append(node.right or node.__class__())",
"def _get_dependency_order(g, node_list):\n \"\"\"Return list of nodes as close as possible to the ordering in node_list,\n but with child nodes earlier in the list than parents.\"\"\"\n access_ = accessibility(g)\n deps = dict((k, set(v) - set([k])) for k, v in access_.iteritems())\n nodes = node_list + list(set(g.nodes()) - set(node_list))\n ordered_nodes = []\n\n while nodes:\n n_ = nodes[0]\n n_deps = deps.get(n_)\n if (n_ in ordered_nodes) or (n_deps is None):\n nodes = nodes[1:]\n continue\n\n moved = False\n for i, n in enumerate(nodes[1:]):\n if n in n_deps:\n nodes = [nodes[i + 1]] + nodes[:i + 1] + nodes[i + 2:]\n moved = True\n break\n\n if not moved:\n ordered_nodes.append(n_)\n nodes = nodes[1:]\n\n return ordered_nodes",
"def visit_Tuple(self, node):\n '''\n A tuple is abstracted as an ordered container of its values\n\n >>> from pythran import passmanager\n >>> pm = passmanager.PassManager('demo')\n >>> module = ast.parse('def foo(a, b): return a, b')\n >>> result = pm.gather(Aliases, module)\n >>> Aliases.dump(result, filter=ast.Tuple)\n (a, b) => ['|[0]=a|', '|[1]=b|']\n\n where the |[i]=id| notation means something that\n may contain ``id`` at index ``i``.\n '''\n if node.elts:\n elts_aliases = set()\n for i, elt in enumerate(node.elts):\n elt_aliases = self.visit(elt)\n elts_aliases.update(ContainerOf(alias, i)\n for alias in elt_aliases)\n else:\n elts_aliases = None\n return self.add(node, elts_aliases)",
"def ipostorder(self):\n '''Depth-first post-order iteration of tree nodes'''\n children = [self, ]\n seen = set()\n while children:\n cur_node = children[-1]\n if cur_node not in seen:\n seen.add(cur_node)\n children.extend(reversed(cur_node.children))\n else:\n children.pop()\n yield cur_node"
] |
[
0.6900789737701416,
0.6887165307998657,
0.6883143782615662,
0.6868157982826233,
0.6808519959449768,
0.6807454228401184,
0.6807418465614319,
0.6763340830802917,
0.665793776512146,
0.6647040247917175,
0.6604621410369873,
0.6588199734687805
] |
Send command, wait for the response (single or multiple lines), check it for errors, and return the reply.
:param cmd: command to send
:param multilines: True - multiline response, False - single line response.
:return: command return value.
|
def sendQuery(self, cmd, multilines=False):
""" Send command, wait for response (single or multi lines), test for errors and return the returned code.
:param cmd: command to send
:param multilines: True - multiline response, False - single line response.
:return: command return value.
"""
self.logger.debug("sendQuery(%s)", cmd)
if not self.is_connected():
raise socket.error("sendQuery on a disconnected socket")
if multilines:
replies = self.__sendQueryReplies(cmd)
for reply in replies:
if reply.startswith(XenaSocket.reply_errors):
raise XenaCommandException('sendQuery({}) reply({})'.format(cmd, replies))
self.logger.debug("sendQuery(%s) -- Begin", cmd)
            for line in replies:
                self.logger.debug("%s", line.strip())
self.logger.debug("sendQuery(%s) -- End", cmd)
return replies
else:
reply = self.__sendQueryReply(cmd)
if reply.startswith(XenaSocket.reply_errors):
raise XenaCommandException('sendQuery({}) reply({})'.format(cmd, reply))
self.logger.debug('sendQuery(%s) reply(%s)', cmd, reply)
return reply
|
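A hedged usage sketch for ``sendQuery``: it assumes an already connected ``XenaSocket``-style object named ``sock`` (construction and connection are not shown in this record), and the command strings are placeholders rather than verified Xena CLI syntax::

    # single-line query: a single reply string is returned
    reply = sock.sendQuery("c_reservation ?")
    # multi-line query: a list of reply lines is returned
    lines = sock.sendQuery("p_config ?", multilines=True)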
[
"def send_command_return_multilines(self, command, *arguments):\n \"\"\" Send command and wait for multiple lines output. \"\"\"\n return self.api.send_command_return_multilines(self, command, *arguments)",
"def sendcmd(self, cmd='AT', timeout=1.0):\n \"\"\"send command, wait for response. returns response from modem.\"\"\"\n import time\n if self.write(cmd):\n while self.get_response() == '' and timeout > 0:\n time.sleep(0.1)\n timeout -= 0.1\n return self.get_lines()",
"def send_command_return_multilines(self, obj, command, *arguments):\n \"\"\" Send command with no output.\n\n :param obj: requested object.\n :param command: command to send.\n :param arguments: list of command arguments.\n :return: list of command output lines.\n :rtype: list(str)\n \"\"\"\n return self._perform_command('{}/{}'.format(self.session_url, obj.ref), command,\n OperReturnType.multiline_output, *arguments).json()",
"def sendline(self, cmd):\n '''\n Send this command to the server and\n return a tuple of the output and the stderr.\n\n The format for parameters is:\n\n cmd (string): The command to send to the sever.\n '''\n self.conn.sendline(cmd, self.linesep)\n\n # saw_prompt = False\n ret_stdout = []\n ret_stderr = []\n while self.conn.has_unread_data:\n stdout, stderr = self.conn.recv()\n\n if stdout:\n ret_stdout.append(stdout)\n if stderr:\n log.debug('Error while executing command.')\n ret_stderr.append(stderr)\n\n if stdout and self.prompt_re.search(stdout):\n break\n\n return ''.join(ret_stdout), ''.join(ret_stderr)",
"def send_command_return_multilines(self, obj, command, *arguments):\n \"\"\" Send command and wait for multiple lines output. \"\"\"\n index_command = obj._build_index_command(command, *arguments)\n return self.chassis_list[obj.chassis].sendQuery(index_command, True)",
"def send_command_return(self, command, *arguments):\n \"\"\" Send command and wait for single line output. \"\"\"\n return self.api.send_command_return(self, command, *arguments)",
"def command(self, cmd, expected_retcode=0): # pylint: disable=invalid-name\n # expected_retcode kwd argument is used in many test cases, we cannot rename it.\n \"\"\"\n Shortcut for sending a command to this node specifically.\n :param cmd: Command to send\n :param expected_retcode: Expected return code as int, default is 0\n :return: CliResponse\n \"\"\"\n return self.bench.execute_command(self.endpoint_id, cmd, expected_retcode=expected_retcode)",
"def multiline_push(self, code, lines):\n \"\"\"Send a multi-message to the peer (using the correct SMTP line\n terminators (usually only called from the SMTPSession).\"\"\"\n for line in lines[:-1]:\n answer = '%s-%s' % (code, line)\n self.push(answer)\n self.push(code, lines[-1])",
"def _send_command(self, cmd, expect=None):\n \"\"\"Send a command to MPlayer.\n\n cmd: the command string\n expect: expect the output starts with a certain string\n The result, if any, is returned as a string.\n \"\"\"\n if not self.is_alive:\n raise NotPlayingError()\n logger.debug(\"Send command to mplayer: \" + cmd)\n cmd = cmd + \"\\n\"\n # In Py3k, TypeErrors will be raised because cmd is a string but stdin\n # expects bytes. In Python 2.x on the other hand, UnicodeEncodeErrors\n # will be raised if cmd is unicode. In both cases, encoding the string\n # will fix the problem.\n try:\n self.sub_proc.stdin.write(cmd)\n except (TypeError, UnicodeEncodeError):\n self.sub_proc.stdin.write(cmd.encode('utf-8', 'ignore'))\n time.sleep(0.1) # wait for mplayer (better idea?)\n # Expect a response for 'get_property' only\n if not expect:\n return\n while True:\n try:\n output = self.sub_proc.stdout.readline().rstrip()\n output = output.decode('utf-8')\n except IOError:\n return None\n # print output\n split_output = output.split('=')\n # print(split_output)\n if len(split_output) == 2 and split_output[0].strip() == expect:\n # We found it\n value = split_output[1]\n return value.strip()",
"def run_command(cmd):\n '''run_command uses subprocess to send a command to the terminal.\n :param cmd: the command to send, should be a list for subprocess\n '''\n output = Popen(cmd,stderr=STDOUT,stdout=PIPE)\n t = output.communicate()[0],output.returncode\n output = {'message':t[0],\n 'return_code':t[1]}\n\n return output",
"async def do_cmd(self, *args, success=None):\n \"\"\"\n Sends the given command to the server.\n\n Args:\n *args: Command and arguments to be sent to the server.\n\n Raises:\n ConnectionResetError: If the connection with the server is\n unexpectedely lost.\n SMTPCommandFailedError: If the command fails.\n\n Returns:\n (int, str): A (code, message) 2-tuple containing the server\n response.\n \"\"\"\n if success is None:\n success = (250,)\n\n cmd = \" \".join(args)\n\n await self.writer.send_command(cmd)\n code, message = await self.reader.read_reply()\n\n if code not in success:\n raise SMTPCommandFailedError(code, message, cmd)\n\n return code, message",
"def _send_command(self, cmd=\"\"):\n \"\"\"\n Handle reading/writing channel directly. It is also sanitizing the output received.\n\n Parameters\n ----------\n cmd : str, optional\n The command to send to the remote device (default : \"\", just send a new line)\n\n Returns\n -------\n output : str\n The output from the command sent\n \"\"\"\n self.connection.write_channel(cmd + \"\\n\")\n time.sleep(1)\n output = self.connection._read_channel_timing()\n output = self.connection.strip_ansi_escape_codes(output)\n output = self.connection.strip_backspaces(output)\n return output"
] |
[
0.8435419797897339,
0.744498074054718,
0.7368252277374268,
0.7289223670959473,
0.7211261987686157,
0.7043935656547546,
0.6924325227737427,
0.6888087391853333,
0.6862530708312988,
0.6852949857711792,
0.6839452385902405,
0.676463782787323
] |
Send command without return value, wait for completion, verify success.
:param cmd: command to send
|
def sendQueryVerify(self, cmd):
""" Send command without return value, wait for completion, verify success.
:param cmd: command to send
"""
cmd = cmd.strip()
self.logger.debug("sendQueryVerify(%s)", cmd)
if not self.is_connected():
raise socket.error("sendQueryVerify on a disconnected socket")
resp = self.__sendQueryReply(cmd)
if resp != self.reply_ok:
raise XenaCommandException('Command {} Fail Expected {} Actual {}'.format(cmd, self.reply_ok, resp))
self.logger.debug("SendQueryVerify(%s) Succeed", cmd)
|
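And a matching sketch for ``sendQueryVerify``, again assuming a connected ``sock`` object and a placeholder command string::

    # raises XenaCommandException unless the chassis answers with the OK reply
    sock.sendQueryVerify("c_comment \"demo\"")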
[
"def send(self, cmd):\n \"\"\" Send a command to the bridge.\n\n :param cmd: List of command bytes.\n \"\"\"\n self._bridge.send(cmd, wait=self.wait, reps=self.reps)",
"def send_command(self, cmd, sudo=False, stderr=None, stdout=None):\n '''send command is a non interactive version of run_command, meaning\n that we execute the command and return the return value, but don't\n attempt to stream any content (text from the screen) back to the\n user. This is useful for commands interacting with OCI bundles.\n\n Parameters\n ==========\n cmd: the list of commands to send to the terminal\n sudo: use sudo (or not)\n '''\n \n if sudo is True:\n cmd = ['sudo'] + cmd\n\n process = subprocess.Popen(cmd, stderr=stderr, stdout=stdout)\n result = process.communicate()\n return result",
"def send(self, cmd=\"\", timeout=300, wait_for_string=None, password=False):\n \"\"\"Send the command to the device and return the output.\n\n Args:\n cmd (str): Command string for execution. Defaults to empty string.\n timeout (int): Timeout in seconds. Defaults to 300 sec (5 min)\n wait_for_string (str): This is optional string that driver\n waits for after command execution. If none the detected\n prompt will be used.\n password (bool): If true cmd representing password is not logged\n and condoor waits for noecho.\n\n Returns:\n A string containing the command output.\n\n Raises:\n ConnectionError: General connection error during command execution\n CommandSyntaxError: Command syntax error or unknown command.\n CommandTimeoutError: Timeout during command execution\n\n \"\"\"\n return self._chain.send(cmd, timeout, wait_for_string, password)",
"def run_command(cmd):\n '''run_command uses subprocess to send a command to the terminal.\n :param cmd: the command to send, should be a list for subprocess\n '''\n output = Popen(cmd,stderr=STDOUT,stdout=PIPE)\n t = output.communicate()[0],output.returncode\n output = {'message':t[0],\n 'return_code':t[1]}\n\n return output",
"def __sendCommand(self, cmd):\n \"\"\"send specific command to reference unit over serial port\n\n Args:\n cmd: OpenThread_WpanCtl command string\n\n Returns:\n Fail: Failed to send the command to reference unit and parse it\n Value: successfully retrieve the desired value from reference unit\n Error: some errors occur, indicates by the followed specific error number\n \"\"\"\n logging.info('%s: sendCommand[%s]', self.port, cmd)\n if self.logThreadStatus == self.logStatus['running']:\n self.logThreadStatus = self.logStatus['pauseReq']\n while self.logThreadStatus != self.logStatus['paused'] and self.logThreadStatus != self.logStatus['stop']:\n pass\n\n ssh_stdin = None\n ssh_stdout = None\n ssh_stderr = None\n try:\n # command retransmit times\n retry_times = 3\n while retry_times > 0:\n retry_times -= 1\n try:\n if self._is_net:\n ssh_stdin, ssh_stdout, ssh_stderr = self.handle.exec_command(cmd)\n else:\n self._sendline(cmd)\n self._expect(cmd)\n except Exception as e:\n logging.exception('%s: failed to send command[%s]: %s', self.port, cmd, str(e))\n if retry_times == 0:\n raise\n else:\n break\n\n line = None\n response = []\n retry_times = 20\n stdout_lines = []\n stderr_lines = []\n if self._is_net:\n stdout_lines = ssh_stdout.readlines()\n stderr_lines = ssh_stderr.readlines()\n if stderr_lines:\n for stderr_line in stderr_lines:\n if re.search(r'Not\\s+Found|failed\\s+with\\s+error', stderr_line.strip(), re.M | re.I):\n print \"Command failed:\" + stderr_line\n return 'Fail'\n print \"Got line: \" + stderr_line\n logging.info('%s: the read line is[%s]', self.port, stderr_line)\n response.append(str(stderr_line.strip()))\n elif stdout_lines:\n for stdout_line in stdout_lines:\n logging.info('%s: the read line is[%s]', self.port, stdout_line)\n if re.search(r'Not\\s+Found|failed\\s+with\\s+error', stdout_line.strip(), re.M | re.I):\n print \"Command failed\"\n return 'Fail'\n print \"Got line: \" + stdout_line\n logging.info('%s: send command[%s] done!', self.port, cmd)\n response.append(str(stdout_line.strip()))\n response.append(WPAN_CARRIER_PROMPT)\n return response\n else:\n while retry_times > 0:\n line = self._readline()\n print \"read line: %s\" % line\n logging.info('%s: the read line is[%s]', self.port, line)\n if line:\n response.append(line)\n if re.match(WPAN_CARRIER_PROMPT, line):\n break\n elif re.search(r'Not\\s+Found|failed\\s+with\\s+error', line, re.M | re.I):\n print \"Command failed\"\n return 'Fail'\n\n retry_times -= 1\n time.sleep(0.1)\n\n if retry_times == 0:\n raise Exception('%s: failed to find end of response' % self.port)\n logging.info('%s: send command[%s] done!', self.port, cmd)\n return response\n except Exception, e:\n ModuleHelper.WriteIntoDebugLogger('sendCommand() Error: ' + str(e))\n raise",
"def send_command(self, cmd):\n \"\"\"Sends a generic command into FIFO.\n\n :param bytes cmd: Command chars to send into FIFO.\n\n \"\"\"\n if not cmd:\n return\n\n with open(self.fifo, 'wb') as f:\n f.write(cmd)",
"def command(self, cmd, expected_retcode=0): # pylint: disable=invalid-name\n # expected_retcode kwd argument is used in many test cases, we cannot rename it.\n \"\"\"\n Shortcut for sending a command to this node specifically.\n :param cmd: Command to send\n :param expected_retcode: Expected return code as int, default is 0\n :return: CliResponse\n \"\"\"\n return self.bench.execute_command(self.endpoint_id, cmd, expected_retcode=expected_retcode)",
"def sendcmd(self, cmd='AT', timeout=1.0):\n \"\"\"send command, wait for response. returns response from modem.\"\"\"\n import time\n if self.write(cmd):\n while self.get_response() == '' and timeout > 0:\n time.sleep(0.1)\n timeout -= 0.1\n return self.get_lines()",
"def _send_command(self, cmd=\"\"):\n \"\"\"\n Handle reading/writing channel directly. It is also sanitizing the output received.\n\n Parameters\n ----------\n cmd : str, optional\n The command to send to the remote device (default : \"\", just send a new line)\n\n Returns\n -------\n output : str\n The output from the command sent\n \"\"\"\n self.connection.write_channel(cmd + \"\\n\")\n time.sleep(1)\n output = self.connection._read_channel_timing()\n output = self.connection.strip_ansi_escape_codes(output)\n output = self.connection.strip_backspaces(output)\n return output",
"def __sendCommand(self, cmd):\n \"\"\"send specific command to reference unit over serial port\n\n Args:\n cmd: OpenThread CLI string\n\n Returns:\n Done: successfully send the command to reference unit and parse it\n Value: successfully retrieve the desired value from reference unit\n Error: some errors occur, indicates by the followed specific error number\n \"\"\"\n logging.info('%s: sendCommand[%s]', self.port, cmd)\n if self.logThreadStatus == self.logStatus['running']:\n self.logThreadStatus = self.logStatus['pauseReq']\n while self.logThreadStatus != self.logStatus['paused'] and self.logThreadStatus != self.logStatus['stop']:\n pass\n\n try:\n # command retransmit times\n retry_times = 3\n while retry_times > 0:\n retry_times -= 1\n try:\n self._sendline(cmd)\n self._expect(cmd)\n except Exception as e:\n logging.exception('%s: failed to send command[%s]: %s', self.port, cmd, str(e))\n if retry_times == 0:\n raise\n else:\n break\n\n line = None\n response = []\n retry_times = 10\n while retry_times > 0:\n line = self._readline()\n logging.info('%s: the read line is[%s]', self.port, line)\n if line:\n response.append(line)\n if line == 'Done':\n break\n else:\n retry_times -= 1\n time.sleep(0.2)\n if line != 'Done':\n raise Exception('%s: failed to find end of response' % self.port)\n logging.info('%s: send command[%s] done!', self.port, cmd)\n return response\n except Exception, e:\n ModuleHelper.WriteIntoDebugLogger(\"sendCommand() Error: \" + str(e))\n raise",
"def sendline(self, cmd):\n '''\n Send this command to the server and\n return a tuple of the output and the stderr.\n\n The format for parameters is:\n\n cmd (string): The command to send to the sever.\n '''\n self.conn.sendline(cmd, self.linesep)\n\n # saw_prompt = False\n ret_stdout = []\n ret_stderr = []\n while self.conn.has_unread_data:\n stdout, stderr = self.conn.recv()\n\n if stdout:\n ret_stdout.append(stdout)\n if stderr:\n log.debug('Error while executing command.')\n ret_stderr.append(stderr)\n\n if stdout and self.prompt_re.search(stdout):\n break\n\n return ''.join(ret_stdout), ''.join(ret_stderr)",
"def send(self, cmd=\"\", timeout=60, wait_for_string=None, password=False):\n \"\"\"Send the command to the device and return the output.\n\n Args:\n cmd (str): Command string for execution. Defaults to empty string.\n timeout (int): Timeout in seconds. Defaults to 60s\n wait_for_string (str): This is optional string that driver\n waits for after command execution. If none the detected\n prompt will be used.\n password (bool): If true cmd representing password is not logged\n and condoor waits for noecho.\n\n Returns:\n A string containing the command output.\n\n Raises:\n ConnectionError: General connection error during command execution\n CommandSyntaxError: Command syntax error or unknown command.\n CommandTimeoutError: Timeout during command execution\n\n \"\"\"\n if self.connected:\n output = ''\n if password:\n self.chain.connection.log(\"Sending password\")\n else:\n self.chain.connection.log(\"Sending command: '{}'\".format(cmd))\n\n try:\n output = self.execute_command(cmd, timeout, wait_for_string, password)\n except ConnectionError:\n self.chain.connection.log(\"Connection lost. Disconnecting.\")\n # self.disconnect()\n raise\n\n if password:\n self.chain.connection.log(\"Password sent successfully\")\n else:\n self.chain.connection.log(\"Command executed successfully: '{}'\".format(cmd))\n\n return output\n\n else:\n raise ConnectionError(\"Device not connected\", host=self.hostname)"
] |
[
0.7671437859535217,
0.7568156719207764,
0.7517185211181641,
0.7498824000358582,
0.7450752258300781,
0.7445734739303589,
0.7442312240600586,
0.7423124313354492,
0.7350210547447205,
0.7292740941047668,
0.7287125587463379,
0.7251755595207214
] |
Scan all SHIELDHIT12A config files to find external files used and return them.
Also change paths in config files to match convention that all resources are
symlinked in job_xxxx/symlink
|
def find_external_files(self, run_input_dir):
"""
Scan all SHIELDHIT12A config files to find external files used and return them.
Also change paths in config files to match convention that all resources are
symlinked in job_xxxx/symlink
"""
beam_file, geo_file, mat_file, _ = self.input_files
# check for external files in BEAM input file
external_beam_files = self._parse_beam_file(beam_file, run_input_dir)
if external_beam_files:
logger.info("External files from BEAM file: {0}".format(external_beam_files))
else:
logger.debug("No external files from BEAM file")
# check for external files in MAT input file
icru_numbers = self._parse_mat_file(mat_file)
if icru_numbers:
logger.info("External files from MAT file: {0}".format(icru_numbers))
else:
logger.debug("No external files from MAT file")
# if ICRU+LOADEX pairs were found - get file names for external material files
icru_files = []
if icru_numbers:
icru_files = self._decrypt_icru_files(icru_numbers)
# check for external files in GEO input file
geo_files = self._parse_geo_file(geo_file, run_input_dir)
if geo_files:
logger.info("External files from GEO file: {0}".format(geo_files))
else:
logger.debug("No external files from GEO file")
external_files = external_beam_files + icru_files + geo_files
return [os.path.join(self.input_path, e) for e in external_files]
|
[
"def _parse_beam_file(self, file_path, run_input_dir):\n \"\"\"Scan SH12A BEAM file for references to external files and return them\"\"\"\n external_files = []\n paths_to_replace = []\n with open(file_path, 'r') as beam_f:\n for line in beam_f.readlines():\n split_line = line.split()\n # line length checking to prevent IndexError\n if len(split_line) > 2 and split_line[0] == \"USEBMOD\":\n logger.debug(\"Found reference to external file in BEAM file: {0} {1}\".format(\n split_line[0], split_line[2]))\n external_files.append(split_line[2])\n paths_to_replace.append(split_line[2])\n elif len(split_line) > 1 and split_line[0] == \"USECBEAM\":\n logger.debug(\"Found reference to external file in BEAM file: {0} {1}\".format(\n split_line[0], split_line[1]))\n external_files.append(split_line[1])\n paths_to_replace.append(split_line[1])\n if paths_to_replace:\n run_dir_config_file = os.path.join(run_input_dir, os.path.split(file_path)[-1])\n logger.debug(\"Calling rewrite_paths method on file: {0}\".format(run_dir_config_file))\n self._rewrite_paths_in_file(run_dir_config_file, paths_to_replace)\n return external_files",
"def _parse_geo_file(self, file_path, run_input_dir):\n \"\"\"Scan SH12A GEO file for references to external files (like voxelised geometry) and return them\"\"\"\n external_files = []\n paths_to_replace = []\n with open(file_path, 'r') as geo_f:\n for line in geo_f.readlines():\n split_line = line.split()\n if len(split_line) > 0 and not line.startswith(\"*\"):\n base_path = os.path.join(self.input_path, split_line[0])\n if os.path.isfile(base_path + '.hed'):\n logger.debug(\"Found ctx + hed files: {0}\".format(base_path))\n external_files.append(base_path + '.hed')\n # try to find ctx file\n if os.path.isfile(base_path + '.ctx'):\n external_files.append(base_path + '.ctx')\n elif os.path.isfile(base_path + '.ctx.gz'):\n external_files.append(base_path + '.ctx.gz')\n # replace path to match symlink location\n paths_to_replace.append(split_line[0])\n if paths_to_replace:\n run_dir_config_file = os.path.join(run_input_dir, os.path.split(file_path)[-1])\n logger.debug(\"Calling rewrite_paths method on file: {0}\".format(run_dir_config_file))\n self._rewrite_paths_in_file(run_dir_config_file, paths_to_replace)\n return external_files",
"def build_job_configs(self, args):\n \"\"\"Hook to build job configurations\n \"\"\"\n job_configs = {}\n\n comp_file = args.get('comp', None)\n if comp_file is not None:\n comp_dict = yaml.safe_load(open(comp_file))\n coordsys = comp_dict.pop('coordsys')\n for v in comp_dict.values():\n v['coordsys'] = coordsys\n else:\n return job_configs\n\n datafile = args['data']\n if datafile is None or datafile == 'None':\n return job_configs\n NAME_FACTORY.update_base_dict(args['data'])\n\n inputfiles = create_inputlist(args['ft1file'])\n outdir_base = os.path.join(NAME_FACTORY.base_dict['basedir'], 'counts_cubes')\n data_ver = NAME_FACTORY.base_dict['data_ver']\n\n for idx, infile in enumerate(inputfiles):\n key = \"%06i\" % idx\n key_scfile = \"%03i\" % (idx + 1)\n output_dir = os.path.join(outdir_base, key)\n try:\n os.mkdir(output_dir)\n except OSError:\n pass\n scfile = args['ft2file'].replace('.lst', '_%s.fits' % key_scfile)\n logfile = make_nfs_path(os.path.join(output_dir,\n 'scatter_mk_%s_%s.log' % (data_ver, key)))\n\n job_configs[key] = comp_dict.copy()\n job_configs[key].update(dict(ft1file=infile,\n scfile=scfile,\n comp=args['comp'],\n hpx_order_max=args['hpx_order_max'],\n outdir=outdir_base,\n outkey=key,\n logfile=logfile,\n pfiles=output_dir))\n\n return job_configs",
"def build_job_configs(self, args):\n \"\"\"Hook to build job configurations\n \"\"\"\n job_configs = {}\n\n ttype = args['ttype']\n (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)\n if targets_yaml is None:\n return job_configs\n\n config_yaml = 'config.yaml'\n config_override = args.get('config')\n if is_not_null(config_override):\n config_yaml = config_override\n\n targets = load_yaml(targets_yaml)\n nsims_job = args['nsims_job']\n first_seed = args['seed']\n nsims = args['nsims']\n last_seed = first_seed + nsims\n\n base_config = dict(sim_profile=args['sim_profile'],\n roi_baseline=args['roi_baseline'],\n non_null_src=args['non_null_src'],\n sim=sim)\n\n for target_name, target_list in targets.items():\n name_keys = dict(target_type=ttype,\n target_name=target_name,\n sim_name=sim,\n fullpath=True)\n simdir = NAME_FACTORY.sim_targetdir(**name_keys)\n config_path = os.path.join(simdir, config_yaml)\n\n job_config = base_config.copy()\n job_config.update(dict(config=config_path,\n profiles=target_list))\n\n current_seed = first_seed\n while current_seed < last_seed:\n fullkey = \"%s_%06i\" % (target_name, current_seed)\n logfile = make_nfs_path(os.path.join(simdir, \"%s_%s_%06i.log\" % (self.linkname, \n target_name, current_seed)))\n if nsims_job <= 0 or current_seed + nsims_job >= last_seed:\n nsims_current = last_seed - current_seed\n else:\n nsims_current = nsims_job\n job_config.update(dict(seed=current_seed,\n nsims=nsims_current,\n logfile=logfile))\n job_configs[fullkey] = job_config.copy()\n current_seed += nsims_current\n\n return job_configs",
"def build_job_configs(self, args):\n \"\"\"Hook to build job configurations\n \"\"\"\n job_configs = {}\n\n components = Component.build_from_yamlfile(args['comp'])\n NAME_FACTORY.update_base_dict(args['data'])\n\n ret_dict = make_diffuse_comp_info_dict(components=components,\n library=args['library'],\n basedir=NAME_FACTORY.base_dict['basedir'])\n diffuse_comp_info_dict = ret_dict['comp_info_dict']\n\n for diffuse_comp_info_key in sorted(diffuse_comp_info_dict.keys()):\n diffuse_comp_info_value = diffuse_comp_info_dict[diffuse_comp_info_key]\n\n for comp in components:\n zcut = \"zmax%i\" % comp.zmax\n key = comp.make_key('{ebin_name}_{evtype_name}')\n if diffuse_comp_info_value.components is None:\n sub_comp_info = diffuse_comp_info_value\n else:\n sub_comp_info = diffuse_comp_info_value.get_component_info(comp)\n name_keys = dict(zcut=zcut,\n sourcekey=sub_comp_info.sourcekey,\n ebin=comp.ebin_name,\n psftype=comp.evtype_name,\n mktime='none',\n coordsys=comp.coordsys,\n irf_ver=NAME_FACTORY.irf_ver(),\n fullpath=True)\n\n outfile = NAME_FACTORY.srcmaps(**name_keys)\n outfile_tokens = os.path.splitext(outfile)\n infile_regexp = \"%s_*.fits*\" % outfile_tokens[0]\n full_key = \"%s_%s\" % (sub_comp_info.sourcekey, key)\n logfile = make_nfs_path(outfile.replace('.fits', '.log'))\n job_configs[full_key] = dict(output=outfile,\n args=infile_regexp,\n hdu=sub_comp_info.source_name,\n logfile=logfile)\n\n return job_configs",
"def build_job_configs(self, args):\n \"\"\"Hook to build job configurations\n \"\"\"\n job_configs = {}\n\n ttype = args['ttype']\n (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)\n if sim is not None:\n raise ValueError(\"Found 'sim' argument on AnalyzeROI_SG config.\")\n if targets_yaml is None:\n return job_configs\n\n config_yaml = 'config.yaml'\n config_override = args.get('config')\n if is_not_null(config_override):\n config_yaml = config_override\n\n targets = load_yaml(targets_yaml)\n base_config = dict(roi_baseline=args['roi_baseline'],\n make_plots=args['make_plots'])\n\n for target_name in targets.keys():\n name_keys = dict(target_type=ttype,\n target_name=target_name,\n fullpath=True)\n target_dir = NAME_FACTORY.targetdir(**name_keys)\n config_path = os.path.join(target_dir, config_yaml)\n logfile = make_nfs_path(os.path.join(\n target_dir, \"%s_%s.log\" % (self.linkname, target_name)))\n job_config = base_config.copy() \n job_config.update(dict(config=config_path,\n logfile=logfile))\n job_configs[target_name] = job_config\n\n return job_configs",
"def build_job_configs(self, args):\n \"\"\"Hook to build job configurations\n \"\"\"\n job_configs = {}\n\n ttype = args['ttype']\n (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(args)\n if targets_yaml is None:\n return job_configs\n\n config_yaml = 'config.yaml'\n config_override = args.get('config')\n if is_not_null(config_override):\n config_yaml = config_override\n\n rand_yaml = NAME_FACTORY.resolve_randconfig(args)\n\n targets = load_yaml(targets_yaml)\n\n base_config = dict(rand_config=rand_yaml)\n\n for target_name in targets.keys():\n name_keys = dict(target_type=ttype,\n target_name=target_name,\n sim_name=sim,\n fullpath=True)\n simdir = NAME_FACTORY.sim_targetdir(**name_keys)\n config_path = os.path.join(simdir, config_yaml)\n outfile = os.path.join(simdir, 'skydirs.yaml')\n logfile = make_nfs_path(outfile.replace('yaml', 'log'))\n job_config = base_config.copy()\n job_config.update(dict(config=config_path,\n outfile=outfile,\n logfile=logfile))\n job_configs[target_name] = job_config\n\n return job_configs",
"def build_job_configs(self, args):\n \"\"\"Hook to build job configurations\n \"\"\"\n job_configs = {}\n\n components = Component.build_from_yamlfile(args['comp'])\n NAME_FACTORY.update_base_dict(args['data'])\n\n ret_dict = make_diffuse_comp_info_dict(components=components,\n library=args['library'],\n basedir='.')\n diffuse_comp_info_dict = ret_dict['comp_info_dict']\n if args['make_xml']:\n SrcmapsDiffuse_SG._make_xml_files(diffuse_comp_info_dict)\n\n for diffuse_comp_info_key in sorted(diffuse_comp_info_dict.keys()):\n diffuse_comp_info_value = diffuse_comp_info_dict[diffuse_comp_info_key]\n no_psf = diffuse_comp_info_value.no_psf\n for comp in components:\n zcut = \"zmax%i\" % comp.zmax\n key = comp.make_key('{ebin_name}_{evtype_name}')\n if diffuse_comp_info_value.components is None:\n sub_comp_info = diffuse_comp_info_value\n else:\n sub_comp_info = diffuse_comp_info_value.get_component_info(comp)\n name_keys = dict(zcut=zcut,\n sourcekey=sub_comp_info.sourcekey,\n ebin=comp.ebin_name,\n psftype=comp.evtype_name,\n mktime='none',\n coordsys=comp.coordsys,\n irf_ver=NAME_FACTORY.irf_ver(),\n fullpath=True)\n\n kmin = 0\n kmax = comp.enumbins + 1\n outfile_base = NAME_FACTORY.srcmaps(**name_keys)\n kstep = HPX_ORDER_TO_KSTEP[comp.hpx_order]\n base_dict = dict(cmap=NAME_FACTORY.ccube(**name_keys),\n expcube=NAME_FACTORY.ltcube(**name_keys),\n irfs=NAME_FACTORY.irfs(**name_keys),\n bexpmap=NAME_FACTORY.bexpcube(**name_keys),\n srcmdl=sub_comp_info.srcmdl_name,\n source=sub_comp_info.source_name,\n no_psf=no_psf,\n evtype=comp.evtype)\n\n if kstep < 0:\n kstep = kmax\n else:\n pass\n\n for k in range(kmin, kmax, kstep):\n full_key = \"%s_%s_%02i\" % (diffuse_comp_info_key, key, k)\n khi = min(kmax, k + kstep)\n\n full_dict = base_dict.copy()\n outfile = outfile_base.replace('.fits', '_%02i.fits' % k)\n logfile = make_nfs_path(outfile_base.replace('.fits', '_%02i.log' % k))\n full_dict.update(dict(outfile=outfile,\n kmin=k, kmax=khi,\n logfile=logfile))\n job_configs[full_key] = full_dict\n\n return job_configs",
"def build_job_configs(self, args):\n \"\"\"Hook to build job configurations\n \"\"\"\n job_configs = {}\n\n ttype = args['ttype']\n (targets_yaml, sim) = NAME_FACTORY.resolve_targetfile(\n args, require_sim_name=True)\n if targets_yaml is None:\n return job_configs\n\n write_full = args['write_full']\n\n targets = load_yaml(targets_yaml)\n\n base_config = dict(config=args['config'],\n nsims=args['nsims'],\n seed=args['seed'])\n\n first = args['seed']\n last = first + args['nsims'] - 1\n\n for target_name, profile_list in targets.items():\n for profile in profile_list:\n full_key = \"%s:%s:%s\" % (target_name, profile, sim)\n name_keys = dict(target_type=ttype,\n target_name=target_name,\n sim_name=sim,\n profile=profile,\n fullpath=True)\n sed_file = NAME_FACTORY.sim_sedfile(**name_keys)\n outfile = sed_file.replace(\n '_SEED.fits', '_collected_%06i_%06i.fits' % (first, last))\n logfile = make_nfs_path(outfile.replace('.fits', '.log'))\n if not write_full:\n outfile = None\n summaryfile = sed_file.replace(\n '_SEED.fits', '_summary_%06i_%06i.fits' % (first, last))\n job_config = base_config.copy()\n job_config.update(dict(sed_file=sed_file,\n outfile=outfile,\n summaryfile=summaryfile,\n logfile=logfile))\n job_configs[full_key] = job_config\n\n return job_configs",
"def build_job_configs(self, args):\n \"\"\"Hook to build job configurations\n \"\"\"\n job_configs = {}\n\n gmm = make_ring_dicts(library=args['library'], basedir='.')\n\n for galkey in gmm.galkeys():\n ring_dict = gmm.ring_dict(galkey)\n for ring_key, ring_info in ring_dict.items():\n output_file = ring_info.merged_gasmap\n file_string = \"\"\n for fname in ring_info.files:\n file_string += \" %s\" % fname\n logfile = make_nfs_path(output_file.replace('.fits', '.log'))\n job_configs[ring_key] = dict(output=output_file,\n args=file_string,\n logfile=logfile)\n\n return job_configs",
"def fix_paths(job):\n \"\"\"\n Coerce input arguments to use temporary files when used for output.\n\n Return a list of temporary file pairs (tmpfile, destination path) and\n a list of arguments.\n\n Converts each HdfsTarget to a string for the path.\n \"\"\"\n tmp_files = []\n args = []\n for x in job.args():\n if isinstance(x, luigi.contrib.hdfs.HdfsTarget): # input/output\n if x.exists() or not job.atomic_output(): # input\n args.append(x.path)\n else: # output\n x_path_no_slash = x.path[:-1] if x.path[-1] == '/' else x.path\n y = luigi.contrib.hdfs.HdfsTarget(x_path_no_slash + '-luigi-tmp-%09d' % random.randrange(0, 1e10))\n tmp_files.append((y, x_path_no_slash))\n logger.info('Using temp path: %s for path %s', y.path, x.path)\n args.append(y.path)\n else:\n try:\n # hopefully the target has a path to use\n args.append(x.path)\n except AttributeError:\n # if there's no path then hope converting it to a string will work\n args.append(str(x))\n\n return (tmp_files, args)",
"def build_job_configs(self, args):\n \"\"\"Hook to build job configurations\n \"\"\"\n job_configs = {}\n\n components = Component.build_from_yamlfile(args['comp'])\n\n datafile = args['data']\n if datafile is None or datafile == 'None':\n return job_configs\n NAME_FACTORY.update_base_dict(args['data'])\n outdir_base = os.path.join(NAME_FACTORY.base_dict['basedir'], 'counts_cubes')\n\n inputfiles = create_inputlist(args['ft1file'])\n num_files = len(inputfiles)\n\n for comp in components:\n zcut = \"zmax%i\" % comp.zmax\n\n mktimelist = copy.copy(comp.mktimefilters)\n if not mktimelist:\n mktimelist.append('none')\n evtclasslist_keys = copy.copy(comp.evtclasses)\n if not evtclasslist_keys:\n evtclasslist_vals = [NAME_FACTORY.base_dict['evclass']]\n else:\n evtclasslist_vals = copy.copy(evtclasslist_keys)\n\n for mktimekey in mktimelist:\n for evtclassval in evtclasslist_vals:\n fullkey = comp.make_key(\n '%s_%s_{ebin_name}_%s_{evtype_name}' %\n (evtclassval, zcut, mktimekey))\n\n name_keys = dict(zcut=zcut,\n ebin=comp.ebin_name,\n psftype=comp.evtype_name,\n coordsys=comp.coordsys,\n irf_ver=NAME_FACTORY.irf_ver(),\n mktime=mktimekey,\n evclass=evtclassval,\n fullpath=True)\n\n ccube_name = os.path.basename(NAME_FACTORY.ccube(**name_keys))\n outfile = os.path.join(outdir_base, ccube_name)\n infiles = _make_input_file_list(outfile, num_files)\n logfile = make_nfs_path(outfile.replace('.fits', '.log'))\n job_configs[fullkey] = dict(args=infiles,\n output=outfile,\n logfile=logfile)\n\n return job_configs"
] |
[
0.7037990093231201,
0.6976031064987183,
0.691632091999054,
0.6895419359207153,
0.68953937292099,
0.689534068107605,
0.6879248023033142,
0.6852285265922546,
0.6822295784950256,
0.679693341255188,
0.67869633436203,
0.6767182946205139
] |
Scan SH12A BEAM file for references to external files and return them
|
def _parse_beam_file(self, file_path, run_input_dir):
"""Scan SH12A BEAM file for references to external files and return them"""
external_files = []
paths_to_replace = []
with open(file_path, 'r') as beam_f:
for line in beam_f.readlines():
split_line = line.split()
# line length checking to prevent IndexError
if len(split_line) > 2 and split_line[0] == "USEBMOD":
logger.debug("Found reference to external file in BEAM file: {0} {1}".format(
split_line[0], split_line[2]))
external_files.append(split_line[2])
paths_to_replace.append(split_line[2])
elif len(split_line) > 1 and split_line[0] == "USECBEAM":
logger.debug("Found reference to external file in BEAM file: {0} {1}".format(
split_line[0], split_line[1]))
external_files.append(split_line[1])
paths_to_replace.append(split_line[1])
if paths_to_replace:
run_dir_config_file = os.path.join(run_input_dir, os.path.split(file_path)[-1])
logger.debug("Calling rewrite_paths method on file: {0}".format(run_dir_config_file))
self._rewrite_paths_in_file(run_dir_config_file, paths_to_replace)
return external_files
|
[
"def find_external_files(self, run_input_dir):\n \"\"\"\n Scan all SHIELDHIT12A config files to find external files used and return them.\n Also change paths in config files to match convention that all resources are\n symlinked in job_xxxx/symlink\n \"\"\"\n beam_file, geo_file, mat_file, _ = self.input_files\n\n # check for external files in BEAM input file\n external_beam_files = self._parse_beam_file(beam_file, run_input_dir)\n if external_beam_files:\n logger.info(\"External files from BEAM file: {0}\".format(external_beam_files))\n else:\n logger.debug(\"No external files from BEAM file\")\n\n # check for external files in MAT input file\n icru_numbers = self._parse_mat_file(mat_file)\n if icru_numbers:\n logger.info(\"External files from MAT file: {0}\".format(icru_numbers))\n else:\n logger.debug(\"No external files from MAT file\")\n # if ICRU+LOADEX pairs were found - get file names for external material files\n icru_files = []\n if icru_numbers:\n icru_files = self._decrypt_icru_files(icru_numbers)\n\n # check for external files in GEO input file\n geo_files = self._parse_geo_file(geo_file, run_input_dir)\n if geo_files:\n logger.info(\"External files from GEO file: {0}\".format(geo_files))\n else:\n logger.debug(\"No external files from GEO file\")\n\n external_files = external_beam_files + icru_files + geo_files\n return [os.path.join(self.input_path, e) for e in external_files]",
"def _parse_geo_file(self, file_path, run_input_dir):\n \"\"\"Scan SH12A GEO file for references to external files (like voxelised geometry) and return them\"\"\"\n external_files = []\n paths_to_replace = []\n with open(file_path, 'r') as geo_f:\n for line in geo_f.readlines():\n split_line = line.split()\n if len(split_line) > 0 and not line.startswith(\"*\"):\n base_path = os.path.join(self.input_path, split_line[0])\n if os.path.isfile(base_path + '.hed'):\n logger.debug(\"Found ctx + hed files: {0}\".format(base_path))\n external_files.append(base_path + '.hed')\n # try to find ctx file\n if os.path.isfile(base_path + '.ctx'):\n external_files.append(base_path + '.ctx')\n elif os.path.isfile(base_path + '.ctx.gz'):\n external_files.append(base_path + '.ctx.gz')\n # replace path to match symlink location\n paths_to_replace.append(split_line[0])\n if paths_to_replace:\n run_dir_config_file = os.path.join(run_input_dir, os.path.split(file_path)[-1])\n logger.debug(\"Calling rewrite_paths method on file: {0}\".format(run_dir_config_file))\n self._rewrite_paths_in_file(run_dir_config_file, paths_to_replace)\n return external_files",
"private List<WyilFile> getExternals() throws IOException {\n\t\tArrayList<WyilFile> externals = new ArrayList<>();\n\t\tList<Build.Package> pkgs = project.getPackages();\n\t\t// Consider each package in turn and identify all contained WyilFiles\n\t\tfor (int i = 0; i != pkgs.size(); ++i) {\n\t\t\tBuild.Package p = pkgs.get(i);\n\t\t\t// FIXME: This is kind broken me thinks. Potentially, we should be able to\n\t\t\t// figure out what modules are supplied via the configuration.\n\t\t\tList<Path.Entry<WyilFile>> entries = p.getRoot().get(Content.filter(\"**/*\", WyilFile.ContentType));\n\t\t\tfor (int j = 0; j != entries.size(); ++j) {\n\t\t\t\texternals.add(entries.get(j).read());\n\t\t\t}\n\t\t}\n\t\treturn externals;\n\t}",
"def get_refs(genome_build, aligner, galaxy_base, data):\n \"\"\"Retrieve the reference genome file location from galaxy configuration.\n \"\"\"\n out = {}\n name_remap = {\"samtools\": \"fasta\"}\n if genome_build:\n galaxy_config = _get_galaxy_tool_info(galaxy_base)\n for name in [x for x in (\"samtools\", aligner) if x]:\n galaxy_dt = _get_galaxy_data_table(name, galaxy_config[\"tool_data_table_config_path\"])\n loc_file, need_remap = _get_galaxy_loc_file(name, galaxy_dt, galaxy_config[\"tool_data_path\"],\n galaxy_base)\n cur_ref = _get_ref_from_galaxy_loc(name, genome_build, loc_file, galaxy_dt, need_remap,\n galaxy_config, data)\n base = os.path.normpath(utils.add_full_path(cur_ref, galaxy_config[\"tool_data_path\"]))\n # Expand directories unless we are an aligner like minimap2 that uses the seq directory\n if os.path.isdir(base) and not (need_remap and os.path.basename(base) == \"seq\"):\n indexes = sorted(glob.glob(os.path.join(base, \"*\")))\n elif name != \"samtools\":\n indexes = sorted(glob.glob(\"%s*\" % utils.splitext_plus(base)[0]))\n else:\n indexes = []\n name = name_remap.get(name, name)\n out[name] = {}\n if os.path.exists(base) and os.path.isfile(base):\n out[name][\"base\"] = base\n if indexes:\n out[name][\"indexes\"] = indexes\n # For references, add compressed inputs and indexes if they exist\n if name == \"fasta\" and \"base\" in out[name] and os.path.exists(out[name][\"base\"] + \".gz\"):\n indexes = [out[name][\"base\"] + \".gz.fai\", out[name][\"base\"] + \".gz.gzi\",\n utils.splitext_plus(out[name][\"base\"])[0] + \".dict\"]\n out[name + \"gz\"] = {\"base\": out[name][\"base\"] + \".gz\",\n \"indexes\": [x for x in indexes if os.path.exists(x)]}\n # add additional indices relative to the base\n if tz.get_in([\"fasta\", \"base\"], out):\n ref_dir, ref_filebase = os.path.split(out[\"fasta\"][\"base\"])\n rtg_dir = os.path.normpath(os.path.join(ref_dir, os.path.pardir, \"rtg\",\n \"%s.sdf\" % (os.path.splitext(ref_filebase)[0])))\n out[\"rtg\"] = {\"base\": os.path.join(rtg_dir, \"mainIndex\"),\n \"indexes\": [x for x in glob.glob(os.path.join(rtg_dir, \"*\"))\n if not x.endswith(\"/mainIndex\")]}\n twobit = os.path.normpath(os.path.join(ref_dir, os.path.pardir, \"ucsc\",\n \"%s.2bit\" % (os.path.splitext(ref_filebase)[0])))\n if os.path.exists(twobit):\n out[\"twobit\"] = twobit\n return out",
"def get_external_references(self):\n \"\"\"\n Iterator that returns all the external references of the markable\n @rtype: L{CexternalReference}\n @return: the external references\n \"\"\"\n for ext_ref_node in self.node.findall('externalReferences'):\n ext_refs_obj = CexternalReferences(ext_ref_node)\n for ref in ext_refs_obj:\n yield ref",
"def readabt(filename, dirs='.'):\n \"\"\"Read abt_*.fio type files from beamline B1, HASYLAB.\n\n Input:\n filename: the name of the file.\n dirs: directories to search for files in\n\n Output:\n A dictionary. The fields are self-explanatory.\n \"\"\"\n # resolve filename\n filename = misc.findfileindirs(filename, dirs)\n f = open(filename, 'rt')\n abt = {'offsetcorrected': False, 'params': {}, 'columns': [], 'data': [], 'title': '<no_title>',\n 'offsets': {}, 'filename': filename}\n readingmode = ''\n for l in f:\n l = l.strip()\n if l.startswith('!') or len(l) == 0:\n continue\n elif l.startswith('%c'):\n readingmode = 'comments'\n elif l.startswith('%p'):\n readingmode = 'params'\n elif l.startswith('%d'):\n readingmode = 'data'\n elif readingmode == 'comments':\n m = re.match(\n r'(?P<scantype>\\w+)-Scan started at (?P<startdate>\\d+-\\w+-\\d+) (?P<starttime>\\d+:\\d+:\\d+), ended (?P<endtime>\\d+:\\d+:\\d+)', l)\n if m:\n abt.update(m.groupdict())\n continue\n else:\n m = re.match(r'Name: (?P<name>\\w+)', l)\n if m:\n abt.update(m.groupdict())\n m1 = re.search(r'from (?P<from>\\d+(?:.\\d+)?)', l)\n if m1:\n abt.update(m1.groupdict())\n m1 = re.search(r'to (?P<to>\\d+(?:.\\d+)?)', l)\n if m1:\n abt.update(m1.groupdict())\n m1 = re.search(r'by (?P<by>\\d+(?:.\\d+)?)', l)\n if m1:\n abt.update(m1.groupdict())\n m1 = re.search(r'sampling (?P<sampling>\\d+(?:.\\d+)?)', l)\n if m1:\n abt.update(m1.groupdict())\n continue\n if l.find('Counter readings are offset corrected') >= 0:\n abt['offsetcorrected'] = True\n readingmode = 'offsets'\n continue\n # if we reach here in 'comments' mode, this is the title line\n abt['title'] = l\n continue\n elif readingmode == 'offsets':\n m = re.findall(r'(\\w+)\\s(\\d+(?:.\\d+)?)', l)\n if m:\n abt['offsets'].update(dict(m))\n for k in abt['offsets']:\n abt['offsets'][k] = float(abt['offsets'][k])\n elif readingmode == 'params':\n abt['params'][l.split('=')[0].strip()] = float(\n l.split('=')[1].strip())\n elif readingmode == 'data':\n if l.startswith('Col'):\n abt['columns'].append(l.split()[2])\n else:\n abt['data'].append([float(x) for x in l.split()])\n f.close()\n # some post-processing\n # remove common prefix from column names\n maxcolnamelen = max(len(c) for c in abt['columns'])\n l = 1\n for l in range(1, maxcolnamelen):\n if len(set([c[:l] for c in abt['columns']])) > 1:\n break\n abt['columns'] = [c[l - 1:] for c in abt['columns']]\n # represent data as a structured array\n dt = np.dtype(list(zip(abt['columns'], itertools.repeat(np.double))))\n abt['data'] = np.array(abt['data'], dtype=np.double).view(dt)\n # dates and times in datetime formats\n monthnames = ['Jan', 'Feb', 'Mar', 'Apr', 'May',\n 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']\n for m, i in zip(monthnames, itertools.count(1)):\n abt['startdate'] = abt['startdate'].replace(m, str(i))\n abt['startdate'] = datetime.date(\n *reversed([int(x) for x in abt['startdate'].split('-')]))\n abt['starttime'] = datetime.time(\n *[int(x) for x in abt['starttime'].split(':')])\n abt['endtime'] = datetime.time(\n *[int(x) for x in abt['endtime'].split(':')])\n abt['start'] = datetime.datetime.combine(\n abt['startdate'], abt['starttime'])\n if abt['endtime'] <= abt['starttime']:\n abt['end'] = datetime.datetime.combine(\n abt['startdate'] + datetime.timedelta(1), abt['endtime'])\n else:\n abt['end'] = datetime.datetime.combine(\n abt['startdate'], abt['endtime'])\n del abt['starttime']\n del abt['startdate']\n del abt['endtime']\n # convert some fields to float\n for k in ['from', 'to', 'by', 
'sampling']:\n if k in abt:\n abt[k] = float(abt[k])\n # change space and dash in title to underscore\n abt['title'] = abt['title'].replace('-', '_').replace(' ', '_')\n return abt",
"def readBerSANS(filename):\n \"\"\"Read a header from a SANS file (produced usually by BerSANS)\"\"\"\n hed = {'Comment': ''}\n translate = {'Lambda': 'Wavelength',\n 'Title': 'Owner',\n 'SampleName': 'Title',\n 'BeamcenterX': 'BeamPosY',\n 'BeamcenterY': 'BeamPosX',\n 'Time': 'MeasTime',\n 'TotalTime': 'MeasTime',\n 'Moni1': 'Monitor',\n 'Moni2': 'Monitor',\n 'Moni': 'Monitor',\n 'Transmission': 'Transm',\n }\n with open(filename, 'rt') as f:\n comment_next = False\n for l in f:\n l = l.strip()\n if comment_next:\n hed['Comment'] = hed['Comment'] + '\\n' + l\n comment_next = False\n elif l.startswith('%Counts'):\n break\n elif l.startswith('%Comment'):\n comment_next = True\n elif l.startswith('%'):\n continue\n elif l.split('=', 1)[0] in translate:\n hed[translate[l.split('=', 1)[0]]] = misc.parse_number(\n l.split('=', 1)[1])\n else:\n try:\n hed[l.split('=', 1)[0]] = misc.parse_number(\n l.split('=', 1)[1])\n except IndexError:\n print(l.split('=', 1))\n if 'FileName' in hed:\n m = re.match('D(\\d+)\\.(\\d+)', hed['FileName'])\n if m is not None:\n hed['FSN'] = int(m.groups()[0])\n hed['suffix'] = int(m.groups()[1])\n if 'FileDate' in hed:\n hed['Date'] = dateutil.parser.parse(hed['FileDate'])\n if 'FileTime' in hed:\n hed['Date'] = datetime.datetime.combine(\n hed['Date'].date(), dateutil.parser.parse(hed['FileTime']).time())\n hed['__Origin__'] = 'BerSANS'\n if 'SD' in hed:\n hed['Dist'] = hed['SD'] * 1000\n if hed['Comment'].startswith('\\n'):\n hed['Comment'] = hed['Comment'][1:]\n hed['__particle__'] = 'neutron'\n hed['Wavelength'] *= 10 # convert from nanometres to Angstroems\n return hed",
"def get_external_references(self):\n \"\"\"\n Iterator that returns all the external reference objects of the external references object\n @rtype: L{CexternalReference}\n @return: the external reference objects\n \"\"\"\n for ext_ref_node in self.node.findall('externalRef'):\n ext_refs_obj = CexternalReference(ext_ref_node)\n for ref in ext_refs_obj:\n yield ref",
"public static SourceFile scanSingleFileConfig(CxxLanguage language, InputFile file, CxxConfiguration cxxConfig,\n SquidAstVisitor<Grammar>... visitors) {\n if (!file.isFile()) {\n throw new IllegalArgumentException(\"File '\" + file + \"' not found.\");\n }\n AstScanner<Grammar> scanner = create(language, cxxConfig, visitors);\n scanner.scanFile(file.file());\n Collection<SourceCode> sources = scanner.getIndex().search(new QueryByType(SourceFile.class));\n if (sources.size() != 1) {\n throw new IllegalStateException(\"Only one SourceFile was expected whereas \"\n + sources.size() + \" has been returned.\");\n }\n return (SourceFile) sources.iterator().next();\n }",
"public function search_references($reference) {\n global $DB;\n\n if (is_null($reference)) {\n throw new coding_exception('NULL is not a valid reference to an external file');\n }\n\n // Give {@link self::unpack_reference()} a chance to throw exception if the\n // reference is not in a valid format.\n self::unpack_reference($reference);\n\n $referencehash = sha1($reference);\n\n $sql = \"SELECT \".self::instance_sql_fields('f', 'r').\"\n FROM {files} f\n JOIN {files_reference} r ON f.referencefileid = r.id\n JOIN {repository_instances} ri ON r.repositoryid = ri.id\n WHERE r.referencehash = ?\n AND (f.component <> ? OR f.filearea <> ?)\";\n\n $rs = $DB->get_recordset_sql($sql, array($referencehash, 'user', 'draft'));\n $files = array();\n foreach ($rs as $filerecord) {\n $files[$filerecord->pathnamehash] = $this->get_file_instance($filerecord);\n }\n $rs->close();\n\n return $files;\n }",
"def _analyse_mat_sections(sections):\n \"\"\"\n Cases:\n - ICRU flag present, LOADDEDX flag missing -> data loaded from some data hardcoded in SH12A binary,\n no need to load external files\n - ICRU flag present, LOADDEDX flag present -> data loaded from external files. ICRU number read from ICRU flag,\n any number following LOADDEDX flag is ignored.\n - ICRU flag missing, LOADDEDX flag present -> data loaded from external files. ICRU number read from LOADDEDX\n - ICRU flag missing, LOADDEDX flag missing -> nothing happens\n \"\"\"\n icru_numbers = []\n for section in sections:\n load_present = False\n load_value = False\n icru_value = False\n for e in section:\n split_line = e.split()\n if \"LOADDEDX\" in e:\n load_present = True\n if len(split_line) > 1:\n load_value = split_line[1] if \"!\" not in split_line[1] else False # ignore ! comments\n elif \"ICRU\" in e and len(split_line) > 1:\n icru_value = split_line[1] if \"!\" not in split_line[1] else False # ignore ! comments\n if load_present: # LOADDEDX is present, so external file is required\n if icru_value: # if ICRU value was given\n icru_numbers.append(icru_value)\n elif load_value: # if only LOADDEDX with values was present in section\n icru_numbers.append(load_value)\n return icru_numbers",
"void externalScan(Set<File> notifiedCreated, Set<File> notifiedDeleted, Set<File> notifiedModified, boolean doFilterPaths, String listenerFilter) {\n // Don't perform the external scan if this monitor holder is paused\n if (isStopped)\n return;\n\n // only do anything if this is an 'external' monitor\n if (!!!FileMonitor.MONITOR_TYPE_EXTERNAL.equals(monitorRef.getProperty(FileMonitor.MONITOR_TYPE)))\n return;\n\n // Give monitoring activity on other threads a chance to catch up before requesting a scan\n // (This is most likely to affect unit test behaviour rather than mbean invocations, but be safe)\n Thread.yield();\n\n // Multiple threads can call the FileNotificationMBean simultaneously so we need to lock\n scanLock.lock();\n try {\n // Always try destroy when we obtain the lock: it will return true if this is in destroy or destroyed state\n // Also (after we have tried doDestroy) ensure that we are in active state\n if (!doDestroy() && (monitorState.get() == MonitorState.ACTIVE.ordinal())) {\n if (coreService.isDetailedScanTraceEnabled() && TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {\n Tr.debug(this, tc, \"File monitor scan: begin\", updateMonitors);\n }\n\n List<File> actualCreated = new ArrayList<File>();\n List<File> actualDeleted = new ArrayList<File>();\n List<File> actualModified = new ArrayList<File>();\n\n scanForUpdates(actualCreated, actualDeleted, actualModified);\n\n // use the correct case forms of the files we found in our internal scan\n Set<File> created = PathUtils.fixPathFiles(actualCreated);\n Set<File> deleted = PathUtils.fixPathFiles(actualDeleted);\n Set<File> modified = PathUtils.fixPathFiles(actualModified);\n\n // SPI PathUtils.fixpathFiles returns an empty collection if the file\n // list is empty, create an actual set so we can add to it later if needed\n if (created == Collections.EMPTY_SET)\n created = new HashSet<File>();\n if (deleted == Collections.EMPTY_SET)\n deleted = new HashSet<File>();\n if (modified == Collections.EMPTY_SET)\n modified = new HashSet<File>();\n\n // Take the previously unnotified/unrequested changes\n // and resolve them against the result of the latest\n // filesystem scan to make sure they are still\n // valid\n resolveChangesForExternalScan(unnotifiedFileCreates,\n unnotifiedFileDeletes,\n unnotifiedFileModifies,\n created,\n deleted,\n modified);\n\n // Now merge the result of the current filesystem scan with\n // previous unnotified changes. 
This represents the complete\n // set of valid/current choices they can now notify about\n created.addAll(unnotifiedFileCreates);\n deleted.addAll(unnotifiedFileDeletes);\n modified.addAll(unnotifiedFileModifies);\n\n // We are going to rebuild these lists from anything left over in the next block\n unnotifiedFileCreates.clear();\n unnotifiedFileDeletes.clear();\n unnotifiedFileModifies.clear();\n\n // If a filter was specified, all pending updates are to be processed.\n if (doFilterPaths) {\n // Now take the notified changes and compare it against all the possible\n // valid choices, unrequested changes are placed into the unnotified set\n // so they can be used by the caller on subsequent calls\n filterSets(created, notifiedCreated, unnotifiedFileCreates);\n filterSets(deleted, notifiedDeleted, unnotifiedFileDeletes);\n filterSets(modified, notifiedModified, unnotifiedFileModifies);\n }\n\n if (!created.isEmpty() || !modified.isEmpty() || !deleted.isEmpty()) {\n // changes were discovered: trace & call the registered file monitor\n if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {\n Tr.debug(this, tc, \"File monitor scan: end; resources changed\",\n created.size() + \" created\",\n modified.size() + \" modified\",\n deleted.size() + \" deleted\");\n }\n\n if (monitor != null) {\n try {\n // If we are processing all pending events, call the extended version of the FileMonitor.\n if (!doFilterPaths && monitor instanceof com.ibm.ws.kernel.filemonitor.FileMonitor) {\n ((com.ibm.ws.kernel.filemonitor.FileMonitor) monitor).onChange(created, modified, deleted, listenerFilter);\n } else {\n monitor.onChange(created, modified, deleted);\n }\n\n } catch (RuntimeException e) {\n // FFDC instrumentation will go here\n // Catch the exception so we can FFDC it\n // Don't increment the exception counter since this is externally triggered\n Tr.warning(tc, \"fileMonitorException\", created, modified, deleted, monitor.getClass(), e.getLocalizedMessage());\n }\n }\n } else if (coreService.isDetailedScanTraceEnabled() && TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {\n // If super detailed trace is enabled, we trace the begin/end of all file scans\n Tr.debug(this, tc, \"File monitor scan: end; no changes\");\n }\n }\n } catch (RuntimeException e) {\n // TODO: MUST CATCH exceptions here (to at least get FFDC)... ick\n } finally {\n try {\n doDestroy(); // always attempt destroy while we hold the lock\n } finally {\n scanLock.unlock();\n }\n }\n\n }"
] |
[
0.7982855439186096,
0.7568964958190918,
0.6761327385902405,
0.6656328439712524,
0.6623234748840332,
0.6597821116447449,
0.6423473954200745,
0.6383647918701172,
0.6373353600502014,
0.636387825012207,
0.6359410285949707,
0.6359152793884277
] |
Scan SH12A GEO file for references to external files (like voxelised geometry) and return them
|
def _parse_geo_file(self, file_path, run_input_dir):
"""Scan SH12A GEO file for references to external files (like voxelised geometry) and return them"""
external_files = []
paths_to_replace = []
with open(file_path, 'r') as geo_f:
for line in geo_f.readlines():
split_line = line.split()
if len(split_line) > 0 and not line.startswith("*"):
base_path = os.path.join(self.input_path, split_line[0])
if os.path.isfile(base_path + '.hed'):
logger.debug("Found ctx + hed files: {0}".format(base_path))
external_files.append(base_path + '.hed')
# try to find ctx file
if os.path.isfile(base_path + '.ctx'):
external_files.append(base_path + '.ctx')
elif os.path.isfile(base_path + '.ctx.gz'):
external_files.append(base_path + '.ctx.gz')
# replace path to match symlink location
paths_to_replace.append(split_line[0])
if paths_to_replace:
run_dir_config_file = os.path.join(run_input_dir, os.path.split(file_path)[-1])
logger.debug("Calling rewrite_paths method on file: {0}".format(run_dir_config_file))
self._rewrite_paths_in_file(run_dir_config_file, paths_to_replace)
return external_files
|
[
"def find_external_files(self, run_input_dir):\n \"\"\"\n Scan all SHIELDHIT12A config files to find external files used and return them.\n Also change paths in config files to match convention that all resources are\n symlinked in job_xxxx/symlink\n \"\"\"\n beam_file, geo_file, mat_file, _ = self.input_files\n\n # check for external files in BEAM input file\n external_beam_files = self._parse_beam_file(beam_file, run_input_dir)\n if external_beam_files:\n logger.info(\"External files from BEAM file: {0}\".format(external_beam_files))\n else:\n logger.debug(\"No external files from BEAM file\")\n\n # check for external files in MAT input file\n icru_numbers = self._parse_mat_file(mat_file)\n if icru_numbers:\n logger.info(\"External files from MAT file: {0}\".format(icru_numbers))\n else:\n logger.debug(\"No external files from MAT file\")\n # if ICRU+LOADEX pairs were found - get file names for external material files\n icru_files = []\n if icru_numbers:\n icru_files = self._decrypt_icru_files(icru_numbers)\n\n # check for external files in GEO input file\n geo_files = self._parse_geo_file(geo_file, run_input_dir)\n if geo_files:\n logger.info(\"External files from GEO file: {0}\".format(geo_files))\n else:\n logger.debug(\"No external files from GEO file\")\n\n external_files = external_beam_files + icru_files + geo_files\n return [os.path.join(self.input_path, e) for e in external_files]",
"def _parse_beam_file(self, file_path, run_input_dir):\n \"\"\"Scan SH12A BEAM file for references to external files and return them\"\"\"\n external_files = []\n paths_to_replace = []\n with open(file_path, 'r') as beam_f:\n for line in beam_f.readlines():\n split_line = line.split()\n # line length checking to prevent IndexError\n if len(split_line) > 2 and split_line[0] == \"USEBMOD\":\n logger.debug(\"Found reference to external file in BEAM file: {0} {1}\".format(\n split_line[0], split_line[2]))\n external_files.append(split_line[2])\n paths_to_replace.append(split_line[2])\n elif len(split_line) > 1 and split_line[0] == \"USECBEAM\":\n logger.debug(\"Found reference to external file in BEAM file: {0} {1}\".format(\n split_line[0], split_line[1]))\n external_files.append(split_line[1])\n paths_to_replace.append(split_line[1])\n if paths_to_replace:\n run_dir_config_file = os.path.join(run_input_dir, os.path.split(file_path)[-1])\n logger.debug(\"Calling rewrite_paths method on file: {0}\".format(run_dir_config_file))\n self._rewrite_paths_in_file(run_dir_config_file, paths_to_replace)\n return external_files",
"def _load_from_geo_ref(self, dsid):\n \"\"\"Load filenames from the N_GEO_Ref attribute of a dataset's file.\"\"\"\n file_handlers = self._get_file_handlers(dsid)\n if not file_handlers:\n return None\n\n fns = []\n for fh in file_handlers:\n base_dir = os.path.dirname(fh.filename)\n try:\n # get the filename and remove the creation time\n # which is often wrong\n fn = fh['/attr/N_GEO_Ref'][:46] + '*.h5'\n fns.extend(glob(os.path.join(base_dir, fn)))\n\n # usually is non-terrain corrected file, add the terrain\n # corrected file too\n if fn[:5] == 'GIMGO':\n fn = 'GITCO' + fn[5:]\n elif fn[:5] == 'GMODO':\n fn = 'GMTCO' + fn[5:]\n else:\n continue\n fns.extend(glob(os.path.join(base_dir, fn)))\n except KeyError:\n LOG.debug(\"Could not load geo-reference information from {}\".format(fh.filename))\n\n return fns",
"def get_refs(genome_build, aligner, galaxy_base, data):\n \"\"\"Retrieve the reference genome file location from galaxy configuration.\n \"\"\"\n out = {}\n name_remap = {\"samtools\": \"fasta\"}\n if genome_build:\n galaxy_config = _get_galaxy_tool_info(galaxy_base)\n for name in [x for x in (\"samtools\", aligner) if x]:\n galaxy_dt = _get_galaxy_data_table(name, galaxy_config[\"tool_data_table_config_path\"])\n loc_file, need_remap = _get_galaxy_loc_file(name, galaxy_dt, galaxy_config[\"tool_data_path\"],\n galaxy_base)\n cur_ref = _get_ref_from_galaxy_loc(name, genome_build, loc_file, galaxy_dt, need_remap,\n galaxy_config, data)\n base = os.path.normpath(utils.add_full_path(cur_ref, galaxy_config[\"tool_data_path\"]))\n # Expand directories unless we are an aligner like minimap2 that uses the seq directory\n if os.path.isdir(base) and not (need_remap and os.path.basename(base) == \"seq\"):\n indexes = sorted(glob.glob(os.path.join(base, \"*\")))\n elif name != \"samtools\":\n indexes = sorted(glob.glob(\"%s*\" % utils.splitext_plus(base)[0]))\n else:\n indexes = []\n name = name_remap.get(name, name)\n out[name] = {}\n if os.path.exists(base) and os.path.isfile(base):\n out[name][\"base\"] = base\n if indexes:\n out[name][\"indexes\"] = indexes\n # For references, add compressed inputs and indexes if they exist\n if name == \"fasta\" and \"base\" in out[name] and os.path.exists(out[name][\"base\"] + \".gz\"):\n indexes = [out[name][\"base\"] + \".gz.fai\", out[name][\"base\"] + \".gz.gzi\",\n utils.splitext_plus(out[name][\"base\"])[0] + \".dict\"]\n out[name + \"gz\"] = {\"base\": out[name][\"base\"] + \".gz\",\n \"indexes\": [x for x in indexes if os.path.exists(x)]}\n # add additional indices relative to the base\n if tz.get_in([\"fasta\", \"base\"], out):\n ref_dir, ref_filebase = os.path.split(out[\"fasta\"][\"base\"])\n rtg_dir = os.path.normpath(os.path.join(ref_dir, os.path.pardir, \"rtg\",\n \"%s.sdf\" % (os.path.splitext(ref_filebase)[0])))\n out[\"rtg\"] = {\"base\": os.path.join(rtg_dir, \"mainIndex\"),\n \"indexes\": [x for x in glob.glob(os.path.join(rtg_dir, \"*\"))\n if not x.endswith(\"/mainIndex\")]}\n twobit = os.path.normpath(os.path.join(ref_dir, os.path.pardir, \"ucsc\",\n \"%s.2bit\" % (os.path.splitext(ref_filebase)[0])))\n if os.path.exists(twobit):\n out[\"twobit\"] = twobit\n return out",
"def _get_ref_from_galaxy_loc(name, genome_build, loc_file, galaxy_dt, need_remap,\n galaxy_config, data):\n \"\"\"Retrieve reference genome file from Galaxy *.loc file.\n\n Reads from tool_data_table_conf.xml information for the index if it\n exists, otherwise uses heuristics to find line based on most common setups.\n \"\"\"\n refs = [ref for dbkey, ref in _galaxy_loc_iter(loc_file, galaxy_dt, need_remap)\n if dbkey == genome_build]\n remap_fn = alignment.TOOLS[name].remap_index_fn\n need_remap = remap_fn is not None\n if len(refs) == 0:\n raise ValueError(\"Did not find genome build %s in bcbio installation: %s\" %\n (genome_build, os.path.normpath(loc_file)))\n else:\n cur_ref = refs[-1]\n # Find genome directory and check for packed wf tarballs\n cur_ref_norm = os.path.normpath(utils.add_full_path(cur_ref, galaxy_config[\"tool_data_path\"]))\n base_dir_i = cur_ref_norm.find(\"/%s/\" % genome_build)\n base_dir = os.path.join(cur_ref_norm[:base_dir_i], genome_build)\n for tarball in glob.glob(os.path.join(base_dir, \"*-wf.tar.gz\")):\n cwlutils.unpack_tarballs(tarball, {\"dirs\": {\"work\": base_dir}}, use_subdir=False)\n if need_remap:\n assert remap_fn is not None, \"%s requires remapping function from base location file\" % name\n cur_ref = os.path.normpath(utils.add_full_path(cur_ref, galaxy_config[\"tool_data_path\"]))\n cur_ref = remap_fn(os.path.abspath(cur_ref))\n return cur_ref",
"def localize_shapefile(shp_href, dirs):\n \"\"\" Given a shapefile href and a set of directories, modify the shapefile\n name so it's correct with respect to the output and cache directories.\n \"\"\"\n # support latest mapnik features of auto-detection\n # of image sizes and jpeg reading support...\n # http://trac.mapnik.org/ticket/508\n\n mapnik_requires_absolute_paths = (MAPNIK_VERSION < 601)\n\n shp_href = urljoin(dirs.source.rstrip('/')+'/', shp_href)\n scheme, host, path, p, q, f = urlparse(shp_href)\n \n if scheme in ('http','https'):\n msg('%s | %s' % (shp_href, dirs.cache))\n scheme, path = '', locally_cache_remote_file(shp_href, dirs.cache)\n else:\n host = None\n \n # collect drive for windows\n to_posix(systempath.realpath(path))\n\n if scheme not in ('file', ''):\n raise Exception(\"Shapefile needs to be local, not %s\" % shp_href)\n \n if mapnik_requires_absolute_paths:\n path = posixpath.realpath(path)\n original = path\n\n path = dirs.output_path(path)\n \n if path.endswith('.zip'):\n # unzip_shapefile_into needs a path it can find\n path = posixpath.join(dirs.output, path)\n path = unzip_shapefile_into(path, dirs.cache, host)\n\n return dirs.output_path(path)",
"def shp2geom(shp_fn):\n \"\"\"Extract geometries from input shapefile\n \n Need to handle multi-part geom: http://osgeo-org.1560.x6.nabble.com/Multipart-to-singlepart-td3746767.html\n \"\"\"\n ds = ogr.Open(shp_fn)\n lyr = ds.GetLayer()\n srs = lyr.GetSpatialRef()\n lyr.ResetReading()\n geom_list = []\n for feat in lyr:\n geom = feat.GetGeometryRef()\n geom.AssignSpatialReference(srs)\n #Duplicate the geometry, or segfault\n #See: http://trac.osgeo.org/gdal/wiki/PythonGotchas\n #g = ogr.CreateGeometryFromWkt(geom.ExportToWkt())\n #g.AssignSpatialReference(srs)\n g = geom_dup(geom)\n geom_list.append(g)\n #geom = ogr.ForceToPolygon(' '.join(geom_list)) \n #Dissolve should convert multipolygon to single polygon \n #return geom_list[0]\n ds = None\n return geom_list",
"def checkDGEOFile(filenames):\n \"\"\"\n Verify that input file has been updated with NPOLFILE\n\n This function checks for the presence of 'NPOLFILE' kw in the primary header\n when 'DGEOFILE' kw is present and valid (i.e. 'DGEOFILE' is not blank or 'N/A').\n It handles the case of science files downloaded from the archive before the new\n software was installed there.\n If 'DGEOFILE' is present and 'NPOLFILE' is missing, print a message and let the user\n choose whether to (q)uit and update the headers or (c)ontinue and run astrodrizzle\n without the non-polynomial correction.\n 'NPOLFILE' will be populated in the pipeline before astrodrizzle is run.\n\n In the case of WFPC2 the old style dgeo files are used to create detector to image\n correction at runtime.\n\n Parameters\n ----------\n filenames : list of str\n file names of all images to be checked\n\n \"\"\"\n\n msg = \"\"\"\n A 'DGEOFILE' keyword is present in the primary header but 'NPOLFILE' keyword was not found.\n This version of the software uses a new format for the residual distortion DGEO files.\n Please consult the instrument web pages for which reference files to download.\n A small (new style) dgeofile is needed ('_npl.fits' extension) and possibly a\n detector to image correction file ('_d2i.fits' extension).\n The names of these files must be added to the primary header either using the task XXXX\n or manually, for example:\n\n hedit {0:s}[0] npolfile fname_npl.fits add+\n hedit {0:s}[0] d2imfile fname_d2i.fits add+\n\n where fname_npl.fits is the name of the new style dgeo file and fname_d2i.fits is\n the name of the detector to image correction. After adding these keywords to the\n primary header, updatewcs must be run to update the science files:\n\n from stwcs import updatewcs\n updatewcs.updatewcs(\"{0:s}\")\n\n Alternatively you may choose to run astrodrizzle without DGEO and detector to image correction.\n\n To stop astrodrizzle and update the dgeo files, type 'q'.\n To continue running astrodrizzle without the non-polynomial distortion correction, type 'c':\n \"\"\"\n\n short_msg = \"\"\"\n To stop astrodrizzle and update the dgeo files, type 'q'.\n To continue running astrodrizzle without the non-polynomial distortion correction, type 'c':\n \"\"\"\n\n for inputfile in filenames:\n try:\n dgeofile = fits.getval(inputfile, 'DGEOFILE', memmap=False)\n except KeyError:\n continue\n if dgeofile not in [\"N/A\", \"n/a\", \"\"]:\n message = msg.format(inputfile)\n try:\n npolfile = fits.getval(inputfile, 'NPOLFILE', memmap=False)\n except KeyError:\n ustop = userStop(message)\n while ustop is None:\n ustop = userStop(short_msg)\n if ustop:\n return None\n\n return filenames",
"def get_genome_ref(genome_build, aligner, galaxy_base):\n \"\"\"Retrieve the reference genome file location from galaxy configuration.\n \"\"\"\n ref_files = dict(\n bowtie = \"bowtie_indices.loc\",\n bwa = \"bwa_index.loc\",\n samtools = \"sam_fa_indices.loc\",\n maq = \"bowtie_indices.loc\")\n remap_fns = dict(\n maq = _remap_to_maq\n )\n out_info = []\n for ref_get in [aligner, \"samtools\"]:\n ref_file = os.path.join(galaxy_base, \"tool-data\", ref_files[ref_get])\n with open(ref_file) as in_handle:\n for line in in_handle:\n if not line.startswith(\"#\"):\n parts = line.strip().split()\n if parts[0] == \"index\":\n parts = parts[1:]\n if parts[0] == genome_build:\n out_info.append(parts[-1])\n break\n try:\n out_info[-1] = remap_fns[ref_get](out_info[-1])\n except KeyError:\n pass\n except IndexError:\n raise IndexError(\"Genome %s not found in %s\" % (genome_build,\n ref_file))\n\n if len(out_info) != 2:\n raise ValueError(\"Did not find genome reference for %s %s\" %\n (genome_build, aligner))\n else:\n return tuple(out_info)",
"def viirs(scans_nb, scan_indices=slice(0, None),\n chn_pixels=6400, scan_lines=32, scan_step=1):\n \"\"\"Describe VIIRS instrument geometry, I-band by default.\n VIIRS scans several lines simultaneously (there are 16 detectors for each\n M-band, 32 detectors for each I-band) so the scan angles (and times) are\n two-dimensional arrays, contrary to AVHRR for example.\n\n scan_step: The increment in number of scans. E.g. if scan_step is 100 and\n the number of scans (scans_nb) is 10 then these 10 scans are\n distributed over the swath so that between each scan there are\n 99 emtpy (excluded) scans\n\n \"\"\"\n\n entire_width = np.arange(chn_pixels)\n scan_points = entire_width[scan_indices.astype('int')]\n scan_pixels = len(scan_points)\n\n ''' initial angle 55.84 deg replaced with 56.28 deg found in\n VIIRS User's Guide from NESDIS, version 1.2 (09/10/2013).\n Ref : NOAA Technical Report NESDIS 142.\n Seems to be better (not quantified)'''\n across_track = \\\n (scan_points / (chn_pixels / 2. - 0.5) - 1) * np.deg2rad(-56.28)\n y_max_angle = np.arctan2(11.87 / 2, 824.0)\n along_track = \\\n -(np.arange(scan_lines) / (scan_lines / 2. - 0.5) - 1) * \\\n y_max_angle\n scan = np.dstack((np.tile(across_track, (scan_lines, 1)).T,\n np.tile(along_track, (scan_pixels, 1))))\n npp = np.tile(scan, [scans_nb, 1]).T\n\n # from the timestamp in the filenames, a granule takes 1:25.400 to record\n # (85.4 seconds) so 1.779166667 would be the duration of 1 scanline (48\n # scans per granule) dividing the duration of a single scan by a width of\n # 6400 pixels results in 0.0002779947917 seconds for each column of 32\n # pixels in the scanline\n\n # the individual times per pixel are probably wrong, unless the scanning\n # behaves the same as for AVHRR, The VIIRS sensor rotates to allow internal\n # calibration before each scanline. This would imply that the scanline\n # always moves in the same direction. more info @\n # http://www.eoportal.org/directory/pres_NPOESSNationalPolarorbitingOperationalEnvironmentalSatelliteSystem.html\n\n SEC_EACH_SCANCOLUMN = 0.0002779947917\n sec_scan_duration = 1.779166667\n times = np.tile(scan_points * SEC_EACH_SCANCOLUMN,\n [np.int(scans_nb*scan_lines), 1])\n offset = np.repeat(np.arange(scans_nb) *\n sec_scan_duration*scan_step, scan_lines)\n times += np.expand_dims(offset, 1)\n\n # build the scan geometry object\n return ScanGeometry(npp, times)",
"def read_external_annotation(fn):\n \"\"\"Read file with junctions from some database. This does not have to be the\n same splice junction database used with STAR.\n\n Parameters\n ----------\n fn : filename str\n File with splice junctions from annotation. The file should have a\n header and contain the following columns: 'gene', 'chrom', 'start',\n 'end', 'strand', 'chrom:start', 'chrom:end', 'donor', 'acceptor', \n 'intron'.\n\n Returns\n -------\n extDF : pandas.DataFrame\n DataFrame indexed by splice junction\n\n stats : list of strings\n Human readable statistics about the external database.\n \n \"\"\"\n assert os.path.exists(fn)\n extDF = pd.read_table(fn, index_col=0, header=0)\n total_num = extDF.shape[0]\n \n # In rare cases, a splice junction might be used by more than one gene. For\n # my purposes, these cases are confounding, so I will remove all such splice\n # junctions. \n intron_count = extDF.intron.value_counts()\n extDF['intron_count'] = extDF.intron.apply(lambda x: intron_count.ix[x])\n extDF = extDF[extDF.intron_count == 1]\n extDF = extDF.drop('intron_count', axis=1)\n\n stats = []\n stats.append('External database stats')\n stats.append('Read external annotation\\t{}'.format(fn))\n stats.append('Total number of junctions\\t{:,}'.format(total_num))\n stats.append(('Number of junctions used in only one '\n 'gene\\t{:,}').format(extDF.shape[0]))\n\n return extDF, stats",
"def _get_galaxy_loc_file(name, galaxy_dt, ref_dir, galaxy_base):\n \"\"\"Retrieve Galaxy *.loc file for the given reference/aligner name.\n\n First tries to find an aligner specific *.loc file. If not defined\n or does not exist, then we need to try and remap it from the\n default reference file\n \"\"\"\n if \"file\" in galaxy_dt and os.path.exists(os.path.join(galaxy_base, galaxy_dt[\"file\"])):\n loc_file = os.path.join(galaxy_base, galaxy_dt[\"file\"])\n need_remap = False\n elif alignment.TOOLS[name].galaxy_loc_file is None:\n loc_file = os.path.join(ref_dir, alignment.BASE_LOCATION_FILE)\n need_remap = True\n else:\n loc_file = os.path.join(ref_dir, alignment.TOOLS[name].galaxy_loc_file)\n need_remap = False\n if not os.path.exists(loc_file):\n loc_file = os.path.join(ref_dir, alignment.BASE_LOCATION_FILE)\n need_remap = True\n return loc_file, need_remap"
] |
[
0.7469627261161804,
0.7438340187072754,
0.7095581293106079,
0.6927589774131775,
0.6735621690750122,
0.6709260940551758,
0.665416955947876,
0.6626731157302856,
0.6550756096839905,
0.6497797966003418,
0.6456049680709839,
0.6410051584243774
] |
Scan SH12A MAT file for ICRU+LOADEX pairs and return found ICRU numbers
|
def _parse_mat_file(self, file_path):
"""Scan SH12A MAT file for ICRU+LOADEX pairs and return found ICRU numbers"""
mat_file_sections = self._extract_mat_sections(file_path)
return self._analyse_mat_sections(mat_file_sections)
|
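The helper _extract_mat_sections called above is not shown in this row. Below is a minimal, hypothetical sketch of what such a helper could look like, assuming material sections in a SH12A MAT file are delimited by MEDIUM ... END lines; the delimiter choice and the method body are assumptions, not taken from the source.

def _extract_mat_sections(self, file_path):
    """Hypothetical sketch only: split a MAT file into per-material sections.

    Assumes each section starts with a MEDIUM line and is closed by END;
    the real implementation used by _parse_mat_file is not shown here.
    """
    sections, current = [], []
    with open(file_path) as mat_file:
        for raw_line in mat_file:
            line = raw_line.strip()
            if line.startswith("MEDIUM"):
                current = [line]          # open a new section
            elif line == "END" and current:
                current.append(line)
                sections.append(current)  # close the current section
                current = []
            elif current:
                current.append(line)      # keep lines inside an open section
    return sections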
[
"def _analyse_mat_sections(sections):\n \"\"\"\n Cases:\n - ICRU flag present, LOADDEDX flag missing -> data loaded from some data hardcoded in SH12A binary,\n no need to load external files\n - ICRU flag present, LOADDEDX flag present -> data loaded from external files. ICRU number read from ICRU flag,\n any number following LOADDEDX flag is ignored.\n - ICRU flag missing, LOADDEDX flag present -> data loaded from external files. ICRU number read from LOADDEDX\n - ICRU flag missing, LOADDEDX flag missing -> nothing happens\n \"\"\"\n icru_numbers = []\n for section in sections:\n load_present = False\n load_value = False\n icru_value = False\n for e in section:\n split_line = e.split()\n if \"LOADDEDX\" in e:\n load_present = True\n if len(split_line) > 1:\n load_value = split_line[1] if \"!\" not in split_line[1] else False # ignore ! comments\n elif \"ICRU\" in e and len(split_line) > 1:\n icru_value = split_line[1] if \"!\" not in split_line[1] else False # ignore ! comments\n if load_present: # LOADDEDX is present, so external file is required\n if icru_value: # if ICRU value was given\n icru_numbers.append(icru_value)\n elif load_value: # if only LOADDEDX with values was present in section\n icru_numbers.append(load_value)\n return icru_numbers",
"def _decrypt_icru_files(numbers):\n \"\"\"Find matching file names for given ICRU numbers\"\"\"\n import json\n icru_file = resource_string(__name__, os.path.join('data', 'SH12A_ICRU_table.json'))\n ref_dict = json.loads(icru_file.decode('ascii'))\n try:\n return [ref_dict[e] for e in numbers]\n except KeyError as er:\n logger.error(\"There is no ICRU file for id: {0}\".format(er))\n raise",
"def loadmat(filename):\n \"\"\"This function should be called instead of direct spio.loadmat\n as it cures the problem of not properly recovering python dictionaries\n from mat files. It calls the function check keys to cure all entries\n which are still mat-objects\n \"\"\"\n data = sploadmat(filename, struct_as_record=False, squeeze_me=True)\n return _check_keys(data)",
"def find_external_files(self, run_input_dir):\n \"\"\"\n Scan all SHIELDHIT12A config files to find external files used and return them.\n Also change paths in config files to match convention that all resources are\n symlinked in job_xxxx/symlink\n \"\"\"\n beam_file, geo_file, mat_file, _ = self.input_files\n\n # check for external files in BEAM input file\n external_beam_files = self._parse_beam_file(beam_file, run_input_dir)\n if external_beam_files:\n logger.info(\"External files from BEAM file: {0}\".format(external_beam_files))\n else:\n logger.debug(\"No external files from BEAM file\")\n\n # check for external files in MAT input file\n icru_numbers = self._parse_mat_file(mat_file)\n if icru_numbers:\n logger.info(\"External files from MAT file: {0}\".format(icru_numbers))\n else:\n logger.debug(\"No external files from MAT file\")\n # if ICRU+LOADEX pairs were found - get file names for external material files\n icru_files = []\n if icru_numbers:\n icru_files = self._decrypt_icru_files(icru_numbers)\n\n # check for external files in GEO input file\n geo_files = self._parse_geo_file(geo_file, run_input_dir)\n if geo_files:\n logger.info(\"External files from GEO file: {0}\".format(geo_files))\n else:\n logger.debug(\"No external files from GEO file\")\n\n external_files = external_beam_files + icru_files + geo_files\n return [os.path.join(self.input_path, e) for e in external_files]",
"static AtomTypePattern[] loadPatterns(InputStream smaIn) throws IOException {\n\n List<AtomTypePattern> matchers = new ArrayList<AtomTypePattern>();\n\n BufferedReader br = new BufferedReader(new InputStreamReader(smaIn));\n String line = null;\n while ((line = br.readLine()) != null) {\n if (skipLine(line)) continue;\n String[] cols = line.split(\" \");\n String sma = cols[0];\n String symb = cols[1];\n try {\n matchers.add(new AtomTypePattern(SmartsPattern.create(sma).setPrepare(false), symb));\n } catch (IllegalArgumentException ex) {\n throw new IOException(ex);\n }\n }\n\n return matchers.toArray(new AtomTypePattern[matchers.size()]);\n }",
"def _loadedges(self) -> typing.Tuple[np.ndarray, np.ndarray, np.ndarray, float, np.ndarray]:\n \"\"\"\n Attempts to intelligently load the .mat file and take average of left and right edges\n\n :return: left and right averages\n :return: times for each column\n :return: accept/reject for each column\n :return: pixel-inch ratio\n \"\"\"\n data = sco.loadmat(self.filename)\n datakeys = [k for k in data.keys()\n if ('right' in k) or ('left' in k) or ('edge' in k)]\n averagedata = ((data[datakeys[0]] + data[datakeys[1]]) / 2)\n\n try:\n times = (data['times'] - data['times'].min())[0]\n except KeyError:\n times = np.arange(len(data[datakeys[0]][0]))\n\n try:\n accept = data['accept']\n except KeyError:\n accept = np.zeros(len(times))\n\n try:\n ratio = data['ratio']\n except KeyError:\n ratio = 1\n\n try:\n viscosity = data['viscosity']\n except KeyError:\n viscosity = np.ones(len(times))\n return averagedata, times, accept, ratio, viscosity",
"def __loadindcomps(self):\n ''' import industry comps '''\n csv_path = os.path.join(os.path.dirname(__file__), self.stock_no_files)\n with open(csv_path) as csv_file:\n csv_data = csv.reader(csv_file)\n result = {}\n check_words = re.compile(r'^[\\d]{2,}[\\w]?')\n for i in csv_data:\n if check_words.match(i[2]):\n try:\n result[i[2]].append(i[0].decode('utf-8'))\n except (ValueError, KeyError):\n try:\n result[i[2]] = [i[0].decode('utf-8')]\n except KeyError:\n pass\n return result",
"def detect_Lacourse2018(dat_orig, s_freq, time, opts):\n \"\"\"Spindle detection based on Lacourse et al., 2018\n \n Parameters\n ----------\n dat_orig : ndarray (dtype='float')\n vector with the data for one channel\n s_freq : float\n sampling frequency\n time : ndarray (dtype='float')\n vector with the time points for each sample\n opts : instance of 'DetectSpindle'\n \n Returns\n -------\n list of dict\n list of detected spindles\n dict\n 'det_value_lo' with detection value, 'det_value_hi' is nan,\n 'sel_value' is nan (for consistency with other methods)\n float\n spindle density, per 30-s epoch\n\n References\n ----------\n Lacourse, K. et al. J. Neurosci. Meth. (2018).\n \"\"\"\n # Downsample z-score parameters, tolerance\n step = opts.windowing['step']\n if step:\n ds_freq = int(1 / step) # downsampled sampling frequency\n opts.zscore['dur'] *= opts.windowing['step']\n opts.tolerance *= opts.windowing['step']\n if opts.zscore['step']:\n opts.zscore['step'] *= opts.windowing['step']\n else:\n ds_freq = s_freq\n\n \n # Absolute sigma power\n dat_sigma = transform_signal(dat_orig, s_freq, 'double_sosbutter', \n opts.det_butter)\n dat_det = transform_signal(dat_sigma, s_freq, 'moving_ms', opts.moving_ms)\n dat_det[dat_det <= 0] = 0.000000001\n abs_sig_pow = log10(dat_det)\n # Option to adapt the absolute threshold, for low-amplitude recordings\n if opts.abs_pow_thresh < 0:\n opts.abs_pow_thresh = (mean(abs_sig_pow) - \n opts.abs_pow_thresh * std(abs_sig_pow))\n abs_sig_pow = transform_signal(abs_sig_pow, ds_freq, 'smooth', opts.smooth)\n \n # Relative sigma power\n dat_det = transform_signal(dat_orig, s_freq, 'moving_power_ratio', \n opts.moving_power_ratio)\n dat_det[dat_det <= 0] = 0.000000001\n dat_det = log10(dat_det)\n rel_sig_pow = transform_signal(dat_det, s_freq, 'moving_zscore', \n opts.zscore)\n rel_sig_pow = transform_signal(rel_sig_pow, ds_freq, 'smooth', opts.smooth)\n \n # Sigma covariance\n dat_broad = transform_signal(dat_orig, s_freq, 'double_sosbutter', \n opts.det_butter2)\n dat_covar = transform_signal(dat_sigma, s_freq, 'moving_covar', \n opts.moving_covar, dat2=dat_broad)\n dat_det = dat_covar.copy()\n dat_det[dat_det < 0] = 0 # negative covariances are discarded\n dat_det = log10(dat_det + 1) # add 1 to avoid -inf\n sigma_covar = transform_signal(dat_det, s_freq, 'moving_zscore', \n opts.zscore)\n sigma_covar = transform_signal(sigma_covar, ds_freq, 'smooth', opts.smooth)\n \n # Sigma correlation\n dat_sd_broad = transform_signal(dat_broad, s_freq, 'moving_sd', \n opts.moving_sd)\n dat_sd_sigma = transform_signal(dat_sigma, s_freq, 'moving_sd', \n opts.moving_sd)\n dat_sd_broad[dat_sd_broad == 0] = 0.000000001\n dat_sd_sigma[dat_sd_sigma == 0] = 0.000000001\n sigma_corr = dat_covar / (dat_sd_broad * dat_sd_sigma)\n sigma_corr = transform_signal(sigma_corr, ds_freq, 'smooth', opts.smooth)\n\n # Thresholding\n abs_and_cov = logical_and(abs_sig_pow >= opts.abs_pow_thresh,\n sigma_covar >= opts.covar_thresh)\n concensus = logical_and.reduce((rel_sig_pow >= opts.rel_pow_thresh,\n sigma_corr >= opts.corr_thresh,\n abs_and_cov)) \n events = detect_events(concensus, 'custom') # at s_freq * 0.1\n \n if events is not None:\n events = _merge_close(dat_sigma, events, time, opts.tolerance)\n events = _select_period(events, abs_and_cov)\n \n if opts.windowing['step']:\n events = events * (s_freq * opts.windowing['step']) # upsample\n events = asarray(around(events), dtype=int)\n \n events = within_duration(events, time, opts.duration)\n events = _merge_close(dat_sigma, events, 
time, opts.min_interval)\n events = remove_straddlers(events, time, s_freq)\n\n power_peaks = peak_in_power(events, dat_orig, s_freq, opts.power_peaks)\n powers = power_in_band(events, dat_orig, s_freq, opts.frequency)\n sp_in_chan = make_spindles(events, power_peaks, powers, dat_sigma,\n dat_orig, time, s_freq)\n\n else:\n lg.info('No spindle found')\n sp_in_chan = []\n\n values = {'abs_pow_thresh': opts.abs_pow_thresh, \n 'rel_pow_thresh': opts.rel_pow_thresh, \n 'covar_thresh': opts.covar_thresh,\n 'corr_thresh': opts.corr_thresh}\n\n density = len(sp_in_chan) * s_freq * 30 / len(dat_orig)\n\n return sp_in_chan, values, density",
"def se_iban_load_map(filename: str) -> list:\n \"\"\"\n Loads Swedish monetary institution codes in CSV format.\n :param filename: CSV file name of the BIC definitions.\n Columns: Institution Name, Range Begin-Range End (inclusive), Account digits count\n :return: List of (bank name, clearing code begin, clearing code end, account digits)\n \"\"\"\n out = []\n name_repl = {\n 'BNP Paribas Fortis SA/NV, Bankfilial Sverige': 'BNP Paribas Fortis SA/NV',\n 'Citibank International Plc, Sweden Branch': 'Citibank International Plc',\n 'Santander Consumer Bank AS (deltar endast i Dataclearingen)': 'Santander Consumer Bank AS',\n 'Nordax Bank AB (deltar endast i Dataclearingen)': 'Nordax Bank AB',\n 'Swedbank och fristående Sparbanker, t ex Leksands Sparbank och Roslagsbanken.': 'Swedbank',\n 'Ålandsbanken Abp (Finland),svensk filial': 'Ålandsbanken Abp',\n 'SBAB deltar endast i Dataclearingen': 'SBAB',\n }\n with open(filename) as fp:\n for row in csv.reader(fp):\n if len(row) == 3:\n name, series, acc_digits = row\n # pprint([name, series, acc_digits])\n\n # clean up name\n name = re.sub(r'\\n.*', '', name)\n if name in name_repl:\n name = name_repl[name]\n\n # clean up series\n ml_acc_digits = acc_digits.split('\\n')\n for i, ser in enumerate(series.split('\\n')):\n begin, end = None, None\n res = re.match(r'^(\\d+)-(\\d+).*$', ser)\n if res:\n begin, end = res.group(1), res.group(2)\n if begin is None:\n res = re.match(r'^(\\d{4}).*$', ser)\n if res:\n begin = res.group(1)\n end = begin\n\n if begin and end:\n digits = None\n try:\n digits = int(acc_digits)\n except ValueError:\n pass\n if digits is None:\n try:\n digits = int(ml_acc_digits[i])\n except ValueError:\n digits = '?'\n except IndexError:\n digits = '?'\n\n out.append([name, begin, end, digits])\n # print('OK!')\n return out",
"def loadCoeffs(filename):\n \"\"\"\n load igrf12 coeffs from file\n :param filename: file which save coeffs (str)\n :return: g and h list one by one (list(float))\n \"\"\"\n gh = []\n gh2arr = []\n with open(filename) as f:\n text = f.readlines()\n for a in text:\n if a[:2] == 'g ' or a[:2] == 'h ':\n b = a.split()[3:]\n b = [float(x) for x in b]\n gh2arr.append(b)\n gh2arr = np.array(gh2arr).transpose()\n N = len(gh2arr)\n for i in range(N):\n if i < 19:\n for j in range(120):\n gh.append(gh2arr[i][j])\n else:\n for p in gh2arr[i]:\n gh.append(p)\n gh.append(0)\n return gh",
"def r_get_numbers(matchgroup, num):\n \"\"\"A helper function which can be used similarly to fscanf(fid,'%f',num) to extract num arguments from the regex iterator\"\"\"\n res = []\n for i in range(num):\n res.append(float(matchgroup.next().group()))\n return np.array(res)",
"def load_bcah98_mass_radius (tablelines, metallicity=0, heliumfrac=0.275,\n age_gyr=5., age_tol=0.05):\n \"\"\"Load mass and radius from the main data table for the famous models of\n Baraffe+ (1998A&A...337..403B).\n\n tablelines\n An iterable yielding lines from the table data file.\n I've named the file '1998A&A...337..403B_tbl1-3.dat'\n in some repositories (it's about 150K, not too bad).\n metallicity\n The metallicity of the model to select.\n heliumfrac\n The helium fraction of the model to select.\n age_gyr\n The age of the model to select, in Gyr.\n age_tol\n The tolerance on the matched age, in Gyr.\n\n Returns: (mass, radius), where both are Numpy arrays.\n\n The ages in the data table vary slightly at fixed metallicity and helium\n fraction. Therefore, there needs to be a tolerance parameter for matching\n the age.\n\n \"\"\"\n mdata, rdata = [], []\n\n for line in tablelines:\n a = line.strip ().split ()\n\n thismetallicity = float (a[0])\n if thismetallicity != metallicity:\n continue\n\n thisheliumfrac = float (a[1])\n if thisheliumfrac != heliumfrac:\n continue\n\n thisage = float (a[4])\n if abs (thisage - age_gyr) > age_tol:\n continue\n\n mass = float (a[3]) * cgs.msun\n teff = float (a[5])\n mbol = float (a[7])\n\n # XXX to check: do they specify m_bol_sun = 4.64? IIRC, yes.\n lbol = 10**(0.4 * (4.64 - mbol)) * cgs.lsun\n area = lbol / (cgs.sigma * teff**4)\n r = np.sqrt (area / (4 * np.pi))\n\n mdata.append (mass)\n rdata.append (r)\n\n return np.asarray (mdata), np.asarray (rdata)"
] |
[
0.8263450264930725,
0.7812528610229492,
0.7005556225776672,
0.6722555160522461,
0.6598689556121826,
0.6553063988685608,
0.6534486413002014,
0.6525524854660034,
0.6503617763519287,
0.6435152888298035,
0.6423606872558594,
0.6421538591384888
] |
Cases:
- ICRU flag present, LOADDEDX flag missing -> data loaded from some data hardcoded in SH12A binary,
no need to load external files
- ICRU flag present, LOADDEDX flag present -> data loaded from external files. ICRU number read from ICRU flag,
any number following LOADDEDX flag is ignored.
- ICRU flag missing, LOADDEDX flag present -> data loaded from external files. ICRU number read from LOADDEDX
- ICRU flag missing, LOADDEDX flag missing -> nothing happens
|
def _analyse_mat_sections(sections):
"""
Cases:
- ICRU flag present, LOADDEDX flag missing -> data loaded from some data hardcoded in SH12A binary,
no need to load external files
- ICRU flag present, LOADDEDX flag present -> data loaded from external files. ICRU number read from ICRU flag,
any number following LOADDEDX flag is ignored.
- ICRU flag missing, LOADDEDX flag present -> data loaded from external files. ICRU number read from LOADDEDX
- ICRU flag missing, LOADDEDX flag missing -> nothing happens
"""
icru_numbers = []
for section in sections:
load_present = False
load_value = False
icru_value = False
for e in section:
split_line = e.split()
if "LOADDEDX" in e:
load_present = True
if len(split_line) > 1:
load_value = split_line[1] if "!" not in split_line[1] else False # ignore ! comments
elif "ICRU" in e and len(split_line) > 1:
icru_value = split_line[1] if "!" not in split_line[1] else False # ignore ! comments
if load_present: # LOADDEDX is present, so external file is required
if icru_value: # if ICRU value was given
icru_numbers.append(icru_value)
elif load_value: # if only LOADDEDX with values was present in section
icru_numbers.append(load_value)
return icru_numbers
|
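A minimal usage sketch of the logic above, assuming the function is callable directly; the MEDIUM/END section contents are hypothetical and exercise the cases described in the docstring:

sections = [
    ["MEDIUM 1", "ICRU 276", "LOADDEDX", "END"],  # ICRU + LOADDEDX -> number taken from ICRU
    ["MEDIUM 2", "LOADDEDX 170", "END"],          # LOADDEDX with a value -> number taken from LOADDEDX
    ["MEDIUM 3", "ICRU 273", "END"],              # ICRU only -> built-in data, nothing appended
]
print(_analyse_mat_sections(sections))            # expected output: ['276', '170']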
[
"public static int cuModuleLoadDataEx (CUmodule phMod, Pointer p, int numOptions, int options[], Pointer optionValues)\r\n {\r\n // Although it should be possible to pass 'null' for these parameters\r\n // when numOptions==0, the driver crashes when they are 'null', so\r\n // they are replaced by non-null (but empty) arrays here.\r\n // Also see the corresponding notes in the native method.\r\n if (numOptions == 0)\r\n {\r\n if (options == null)\r\n {\r\n options = new int[0];\r\n }\r\n if (optionValues == null)\r\n {\r\n optionValues = Pointer.to(new int[0]);\r\n }\r\n }\r\n return checkResult(cuModuleLoadDataExNative(\r\n phMod, p, numOptions, options, optionValues));\r\n }",
"public static int cuModuleLoadDataEx(CUmodule phMod, String string, int numOptions, int options[], Pointer optionValues)\r\n {\r\n byte bytes[] = string.getBytes();\r\n byte image[] = Arrays.copyOf(bytes, bytes.length+1);\r\n return cuModuleLoadDataEx(phMod, Pointer.to(image), numOptions, options, optionValues);\r\n }",
"def load_data(self, idx):\n \"\"\"Load the internal data of all sequences. Load from file if the\n corresponding disk flag is activated, otherwise load from RAM.\"\"\"\n for name in self:\n ndim = getattr(self, '_%s_ndim' % name)\n diskflag = getattr(self, '_%s_diskflag' % name)\n ramflag = getattr(self, '_%s_ramflag' % name)\n if diskflag:\n file_ = getattr(self, '_%s_file' % name)\n length_tot = 1\n shape = []\n for jdx in range(ndim):\n length = getattr(self, '_%s_length_%s' % (name, jdx))\n length_tot *= length\n shape.append(length)\n raw = file_.read(length_tot*8)\n values = struct.unpack(length_tot*'d', raw)\n if ndim:\n values = numpy.array(values).reshape(shape)\n else:\n values = values[0]\n elif ramflag:\n array = getattr(self, '_%s_array' % name)\n values = array[idx]\n if diskflag or ramflag:\n if ndim == 0:\n setattr(self, name, values)\n else:\n getattr(self, name)[:] = values",
"@NonNull\n private synchronized CrashReportData legacyLoad(@NonNull Reader reader) throws IOException {\n int mode = NONE, unicode = 0, count = 0;\n char nextChar;\n char[] buf = new char[40];\n int offset = 0, keyLength = -1, intVal;\n boolean firstChar = true;\n\n final CrashReportData crashData = new CrashReportData();\n final BufferedReader br = new BufferedReader(reader, ACRAConstants.DEFAULT_BUFFER_SIZE_IN_BYTES);\n try {\n while (true) {\n intVal = br.read();\n if (intVal == -1) {\n break;\n }\n nextChar = (char) intVal;\n\n if (offset == buf.length) {\n final char[] newBuf = new char[buf.length * 2];\n System.arraycopy(buf, 0, newBuf, 0, offset);\n buf = newBuf;\n }\n if (mode == UNICODE) {\n final int digit = Character.digit(nextChar, 16);\n if (digit >= 0) {\n unicode = (unicode << 4) + digit;\n if (++count < 4) {\n continue;\n }\n } else if (count <= 4) {\n // luni.09=Invalid Unicode sequence: illegal character\n throw new IllegalArgumentException(\"luni.09\");\n }\n mode = NONE;\n buf[offset++] = (char) unicode;\n if (nextChar != '\\n' && nextChar != '\\u0085') {\n continue;\n }\n }\n if (mode == SLASH) {\n mode = NONE;\n switch (nextChar) {\n case '\\r':\n mode = CONTINUE; // Look for a following \\n\n continue;\n case '\\u0085':\n case '\\n':\n mode = IGNORE; // Ignore whitespace on the next line\n continue;\n case 'b':\n nextChar = '\\b';\n break;\n case 'f':\n nextChar = '\\f';\n break;\n case 'n':\n nextChar = '\\n';\n break;\n case 'r':\n nextChar = '\\r';\n break;\n case 't':\n nextChar = '\\t';\n break;\n case 'u':\n mode = UNICODE;\n unicode = count = 0;\n continue;\n }\n } else {\n switch (nextChar) {\n case '#':\n case '!':\n if (firstChar) {\n while (true) {\n intVal = br.read();\n if (intVal == -1) {\n break;\n }\n nextChar = (char) intVal; // & 0xff\n // not\n // required\n if (nextChar == '\\r' || nextChar == '\\n' || nextChar == '\\u0085') {\n break;\n }\n }\n continue;\n }\n break;\n case '\\n':\n if (mode == CONTINUE) { // Part of a \\r\\n sequence\n mode = IGNORE; // Ignore whitespace on the next line\n continue;\n }\n // fall into the next case\n case '\\u0085':\n case '\\r':\n mode = NONE;\n firstChar = true;\n if (offset > 0 || (offset == 0 && keyLength == 0)) {\n if (keyLength == -1) {\n keyLength = offset;\n }\n final String temp = new String(buf, 0, offset);\n putKeyValue(crashData, temp.substring(0, keyLength), temp.substring(keyLength));\n }\n keyLength = -1;\n offset = 0;\n continue;\n case '\\\\':\n if (mode == KEY_DONE) {\n keyLength = offset;\n }\n mode = SLASH;\n continue;\n case ':':\n case '=':\n if (keyLength == -1) { // if parsing the key\n mode = NONE;\n keyLength = offset;\n continue;\n }\n break;\n }\n if (Character.isWhitespace(nextChar)) {\n if (mode == CONTINUE) {\n mode = IGNORE;\n }\n // if key length == 0 or value length == 0\n if (offset == 0 || offset == keyLength || mode == IGNORE) {\n continue;\n }\n if (keyLength == -1) { // if parsing the key\n mode = KEY_DONE;\n continue;\n }\n }\n if (mode == IGNORE || mode == CONTINUE) {\n mode = NONE;\n }\n }\n firstChar = false;\n if (mode == KEY_DONE) {\n keyLength = offset;\n mode = NONE;\n }\n buf[offset++] = nextChar;\n }\n if (mode == UNICODE && count <= 4) {\n // luni.08=Invalid Unicode sequence: expected format \\\\uxxxx\n throw new IllegalArgumentException(\"luni.08\");\n }\n if (keyLength == -1 && offset > 0) {\n keyLength = offset;\n }\n if (keyLength >= 0) {\n final String temp = new String(buf, 0, offset);\n String value = temp.substring(keyLength);\n if (mode == SLASH) {\n 
value += \"\\u0000\";\n }\n putKeyValue(crashData, temp.substring(0, keyLength), value);\n }\n\n IOUtils.safeClose(reader);\n\n return crashData;\n } finally {\n IOUtils.safeClose(br);\n }\n }",
"long getSupplementaryFlags(ClassSymbol c) {\n if (jrtIndex == null || !jrtIndex.isInJRT(c.classfile) || c.name == names.module_info) {\n return 0;\n }\n\n if (supplementaryFlags == null) {\n supplementaryFlags = new HashMap<>();\n }\n\n Long flags = supplementaryFlags.get(c.packge());\n if (flags == null) {\n long newFlags = 0;\n try {\n JRTIndex.CtSym ctSym = jrtIndex.getCtSym(c.packge().flatName());\n Profile minProfile = Profile.DEFAULT;\n if (ctSym.proprietary)\n newFlags |= PROPRIETARY;\n if (ctSym.minProfile != null)\n minProfile = Profile.lookup(ctSym.minProfile);\n if (profile != Profile.DEFAULT && minProfile.value > profile.value) {\n newFlags |= NOT_IN_PROFILE;\n }\n } catch (IOException ignore) {\n }\n supplementaryFlags.put(c.packge(), flags = newFlags);\n }\n return flags;\n }",
"def load_file(self, sequence):\n \"\"\"Load data from an \"external\" data file an pass it to\n the given |IOSequence|.\"\"\"\n try:\n if sequence.filetype_ext == 'npy':\n sequence.series = sequence.adjust_series(\n *self._load_npy(sequence))\n elif sequence.filetype_ext == 'asc':\n sequence.series = sequence.adjust_series(\n *self._load_asc(sequence))\n elif sequence.filetype_ext == 'nc':\n self._load_nc(sequence)\n except BaseException:\n objecttools.augment_excmessage(\n 'While trying to load the external data of sequence %s'\n % objecttools.devicephrase(sequence))",
"public static void checkLoaded() {\n if (LOAD_ERROR != null) {\n throw LOAD_ERROR;\n }\n // Make a test call, sometimes the error won't occur until you try the native method.\n // 2 ^ 3 = 8, 8 mod 5 = 3\n BigInteger two = BigInteger.valueOf(2);\n BigInteger three = BigInteger.valueOf(3);\n BigInteger four = BigInteger.valueOf(4);\n BigInteger five = BigInteger.valueOf(5);\n BigInteger answer;\n\n answer = modPowInsecure(two, three, five);\n if (!three.equals(answer)) {\n throw new AssertionError(\"libgmp is loaded but modPowInsecure returned the wrong answer\");\n }\n\n answer = modPowSecure(two, three, five);\n if (!three.equals(answer)) {\n throw new AssertionError(\"libgmp is loaded but modPowSecure returned the wrong answer\");\n }\n\n int answr = kronecker(four, five);\n if (answr != 1) {\n throw new AssertionError(\"libgmp is loaded but kronecker returned the wrong answer\");\n }\n }",
"def load_shx(self, shapefile_name):\r\n \"\"\"\r\n Attempts to load file with .shx extension as both lower and upper case\r\n \"\"\"\r\n shx_ext = 'shx'\r\n try:\r\n self.shx = open(\"%s.%s\" % (shapefile_name, shx_ext), \"rb\")\r\n except IOError:\r\n try:\r\n self.shx = open(\"%s.%s\" % (shapefile_name, shx_ext.upper()), \"rb\")\r\n except IOError:\r\n pass",
"function parse_ExternName(blob, length, opts) {\n\tvar flags = blob.read_shift(2);\n\tvar body;\n\tvar o = ({\n\t\tfBuiltIn: flags & 0x01,\n\t\tfWantAdvise: (flags >>> 1) & 0x01,\n\t\tfWantPict: (flags >>> 2) & 0x01,\n\t\tfOle: (flags >>> 3) & 0x01,\n\t\tfOleLink: (flags >>> 4) & 0x01,\n\t\tcf: (flags >>> 5) & 0x3FF,\n\t\tfIcon: flags >>> 15 & 0x01\n\t});\n\tif(opts.sbcch === 0x3A01) body = parse_AddinUdf(blob, length-2, opts);\n\t//else throw new Error(\"unsupported SupBook cch: \" + opts.sbcch);\n\to.body = body || blob.read_shift(length-2);\n\tif(typeof body === \"string\") o.Name = body;\n\treturn o;\n}",
"private boolean readExternalId(boolean requireSystemName, boolean assignFields) throws IOException, KriptonRuntimeException {\n\t\tskip();\n\t\tint c = peekCharacter();\n\n\t\tif (c == 'S') {\n\t\t\tread(SYSTEM);\n\t\t} else if (c == 'P') {\n\t\t\tread(PUBLIC);\n\t\t\tskip();\n\t\t\tif (assignFields) {\n\t\t\t\tpublicId = readQuotedId(true);\n\t\t\t} else {\n\t\t\t\treadQuotedId(false);\n\t\t\t}\n\t\t} else {\n\t\t\treturn false;\n\t\t}\n\n\t\tskip();\n\n\t\tif (!requireSystemName) {\n\t\t\tint delimiter = peekCharacter();\n\t\t\tif (delimiter != '\"' && delimiter != '\\'') {\n\t\t\t\treturn true; // no system name!\n\t\t\t}\n\t\t}\n\n\t\tif (assignFields) {\n\t\t\tsystemId = readQuotedId(true);\n\t\t} else {\n\t\t\treadQuotedId(false);\n\t\t}\n\t\treturn true;\n\t}",
"private void unloadSpectraConditionally(NavigableMap<Integer, IScan> scansInSubsetByNumber,\n LCMSDataSubset subset, Set<LCMSDataSubset> exlude) {\n boolean isOkToUnload;\n for (IScan scan : scansInSubsetByNumber.values()) {\n if (subset.isInSubset(scan)) {\n isOkToUnload = true;\n for (LCMSDataSubset exludedSubset : exlude) {\n if (exludedSubset.isInSubset(scan)) {\n isOkToUnload = false;\n break;\n }\n }\n if (isOkToUnload) {\n scan.setSpectrum(null, false);\n }\n }\n }\n }",
"private void recomputeLoadSums() {\r\n loadsHaveChanged.set(false);\r\n for (int d = 0; d < nbDims; d++) {\r\n int sli = 0;\r\n int sls = 0;\r\n for (int b = 0; b < nbBins; b++) {\r\n sli += loads[d][b].getLB();\r\n sls += loads[d][b].getUB();\r\n }\r\n this.sumLoadInf[d].set(sli);\r\n this.sumLoadSup[d].set(sls);\r\n }\r\n }"
] |
[
0.7070738673210144,
0.6922526359558105,
0.6904281973838806,
0.6704273223876953,
0.6688159108161926,
0.66507488489151,
0.6602060794830322,
0.6574947834014893,
0.655653715133667,
0.6555174589157104,
0.6555153727531433,
0.6554554104804993
] |
Find matching file names for given ICRU numbers
|
def _decrypt_icru_files(numbers):
"""Find matching file names for given ICRU numbers"""
import json
icru_file = resource_string(__name__, os.path.join('data', 'SH12A_ICRU_table.json'))
ref_dict = json.loads(icru_file.decode('ascii'))
try:
return [ref_dict[e] for e in numbers]
except KeyError as er:
logger.error("There is no ICRU file for id: {0}".format(er))
raise
|
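A standalone sketch of the lookup performed above, with a hypothetical mapping standing in for the bundled data/SH12A_ICRU_table.json resource; the keys and file names below are invented for illustration only:

import json

ref_dict = json.loads('{"276": "276.dedx", "170": "170.dedx"}')  # hypothetical table content
numbers = ["276", "170"]
print([ref_dict[n] for n in numbers])  # ['276.dedx', '170.dedx']
# An id missing from the table would raise KeyError, which _decrypt_icru_files logs and re-raises.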
[
"def _find_umi(files):\n \"\"\"Find UMI file using different naming schemes.\n\n R1/R2/R3 => R1/R3 with R2 UMI\n R1/R2/I1 => R1/R2 with I1 UMI\n \"\"\"\n base = os.path.basename(_commonprefix(files))\n\n def _file_ext(f):\n exts = utils.splitext_plus(os.path.basename(f).replace(base, \"\"))[0].split(\"_\")\n exts = [x for x in exts if x]\n return exts[0]\n\n exts = dict([(_file_ext(f), f) for f in files])\n if \"I1\" in exts:\n return exts[\"R1\"], exts[\"R2\"], exts[\"I1\"]\n else:\n assert \"R3\" in exts, exts\n return exts[\"R1\"], exts[\"R3\"], exts[\"R2\"]",
"def _analyse_mat_sections(sections):\n \"\"\"\n Cases:\n - ICRU flag present, LOADDEDX flag missing -> data loaded from some data hardcoded in SH12A binary,\n no need to load external files\n - ICRU flag present, LOADDEDX flag present -> data loaded from external files. ICRU number read from ICRU flag,\n any number following LOADDEDX flag is ignored.\n - ICRU flag missing, LOADDEDX flag present -> data loaded from external files. ICRU number read from LOADDEDX\n - ICRU flag missing, LOADDEDX flag missing -> nothing happens\n \"\"\"\n icru_numbers = []\n for section in sections:\n load_present = False\n load_value = False\n icru_value = False\n for e in section:\n split_line = e.split()\n if \"LOADDEDX\" in e:\n load_present = True\n if len(split_line) > 1:\n load_value = split_line[1] if \"!\" not in split_line[1] else False # ignore ! comments\n elif \"ICRU\" in e and len(split_line) > 1:\n icru_value = split_line[1] if \"!\" not in split_line[1] else False # ignore ! comments\n if load_present: # LOADDEDX is present, so external file is required\n if icru_value: # if ICRU value was given\n icru_numbers.append(icru_value)\n elif load_value: # if only LOADDEDX with values was present in section\n icru_numbers.append(load_value)\n return icru_numbers",
"def search_data_custom(Channel, TraceTitle, RunNos, directoryPath='.'):\n \"\"\"\n Lets you create a list with full file paths of the files\n named with the LeCroy's custom naming scheme.\n\n Parameters\n ----------\n Channel : int\n The channel you want to load\n TraceTitle : string\n The custom trace title of the files. \n RunNos : sequence\n Sequence of run numbers you want to load\n RepeatNos : sequence\n Sequence of repeat numbers you want to load\n directoryPath : string, optional\n The path to the directory housing the data\n The default is the current directory\n\n Returns\n -------\n Paths : list\n A list containing the full file paths of the files you were looking for. \n \"\"\"\n files = glob('{}/*'.format(directoryPath))\n files_CorrectChannel = [] \n for file_ in files:\n if 'C{}'.format(Channel) in file_:\n files_CorrectChannel.append(file_)\n files_CorrectRunNo = []\n for RunNo in RunNos:\n files_match = _fnmatch.filter(\n files_CorrectChannel, '*C{}'.format(Channel)+TraceTitle+str(RunNo).zfill(5)+'.*')\n for file_ in files_match:\n files_CorrectRunNo.append(file_)\n print(\"loading the following files: {}\".format(files_CorrectRunNo))\n paths = files_CorrectRunNo\n return paths",
"def _find_files(dl_paths, publisher, url_dict):\n \"\"\"Find files corresponding to urls.\"\"\"\n if publisher == 'cnn':\n top_dir = os.path.join(dl_paths['cnn_stories'], 'cnn', 'stories')\n elif publisher == 'dm':\n top_dir = os.path.join(dl_paths['dm_stories'], 'dailymail', 'stories')\n else:\n logging.fatal('Unsupported publisher: %s', publisher)\n files = tf.io.gfile.listdir(top_dir)\n\n ret_files = []\n for p in files:\n basename = os.path.basename(p)\n if basename[0:basename.find('.story')] in url_dict:\n ret_files.append(os.path.join(top_dir, p))\n return ret_files",
"def search_aikif(txt, formatHTML=True):\n \"\"\"\n search for text - currently this looks in all folders in the\n root of AIKIF but that also contains binaries so will need to \n use the agent_filelist.py to specify the list of folders.\n \n NOTE - this needs to use indexes rather than full search each time\n \"\"\"\n results = []\n num_found = 0\n import aikif.lib.cls_filelist as mod_fl\n my_files = mod_fl.FileList([aikif_folder ], ['*.*'], ['*.pyc'])\n files = my_files.get_list()\n for f in files:\n try:\n num_found = 0\n with open(f, 'r') as cur:\n line_num = 0\n for line in cur:\n line_num += 1\n if txt in line:\n num_found += 1\n if formatHTML is True:\n results.append(format_result(line, line_num, txt))\n else:\n results.append([f, line, line_num, txt])\n if num_found > 0:\n if formatHTML is True:\n results.append('<h3>' + f + ' = ' + str(num_found) + ' results</h3>')\n else: \n print(f + ' = ' + str(num_found) + '')\n except Exception:\n results.append('problem with file ' + f)\n if len(results) == 0:\n results.append(\"No results\")\n return results",
"def get_filenames(is_training, data_dir):\n \"\"\"Return filenames for dataset.\"\"\"\n if is_training:\n return [\n os.path.join(data_dir, 'train-%05d-of-01024' % i)\n for i in range(_NUM_TRAIN_FILES)]\n else:\n return [\n os.path.join(data_dir, 'validation-%05d-of-00128' % i)\n for i in range(128)]",
"def get_filenames(options):\n\n '''Get the filenames from the command line, optionally sorted by\nnumber, so that IMG_10.png is re-arranged to come after IMG_9.png.\nThis is a nice feature because some scanner programs (like Image\nCapture on Mac OS X) automatically number files without leading zeros,\nand this way you can supply files using a wildcard and still have the\npages ordered correctly.\n\n '''\n\n if not options.sort_numerically:\n return options.filenames\n\n filenames = []\n\n for filename in options.filenames:\n basename = os.path.basename(filename)\n root, _ = os.path.splitext(basename)\n matches = re.findall(r'[0-9]+', root)\n if matches:\n num = int(matches[-1])\n else:\n num = -1\n filenames.append((num, filename))\n\n return [fn for (_, fn) in sorted(filenames)]",
"def ifiles(irods_path):\n \"\"\"Return a list of filenames for given iRODS path (recursively)\"\"\"\n raw_output = subprocess.check_output(\n \"ils -r --bundle {0}\"\n \" | grep 'Bundle file:'\"\n \" | awk '{{print $3}}'\".format(irods_path),\n shell=True\n )\n filenames = raw_output.decode('ascii').strip().split(\"\\n\")\n return filenames",
"def identify_datafiles(root,\n extensions_to_ignore=None,\n directories_to_ignore=None,\n files_to_ignore=None):\n \"\"\" Identify files that might contain data\n\n See function IP_verified() for details about optinoal parmeters\n \"\"\"\n\n for dirpath, dirnames, filenames in walk(root):\n\n for ignore in directories_to_ignore:\n if ignore in dirnames:\n dirnames.remove(ignore) # don't visit ignored directories\n\n\n for filename in filenames:\n\n\n # Ignore extensions that need no IP check\n ignore = False\n for ext in extensions_to_ignore:\n if filename.endswith(ext):\n ignore = True\n\n if filename in files_to_ignore:\n ignore = True\n\n if ignore is False:\n yield dirpath, filename",
"def findall(self):\n \"\"\"Find all files under the base and set ``allfiles`` to the absolute\n pathnames of files found.\n \"\"\"\n from stat import S_ISREG, S_ISDIR, S_ISLNK\n\n self.allfiles = allfiles = []\n root = self.base\n stack = [root]\n pop = stack.pop\n push = stack.append\n\n while stack:\n root = pop()\n names = os.listdir(root)\n\n for name in names:\n fullname = os.path.join(root, name)\n\n # Avoid excess stat calls -- just one will do, thank you!\n stat = os.stat(fullname)\n mode = stat.st_mode\n if S_ISREG(mode):\n allfiles.append(fsdecode(fullname))\n elif S_ISDIR(mode) and not S_ISLNK(mode):\n push(fullname)",
"def irafglob(inlist, atfile=None):\n \"\"\" Returns a list of filenames based on the type of IRAF input.\n\n Handles lists, wild-card characters, and at-files. For special\n at-files, use the atfile keyword to process them.\n\n This function is recursive, so IRAF lists can also contain at-files\n and wild-card characters, e.g. `a.fits`, `@file.lst`, `*flt.fits`.\n \"\"\"\n\n # Sanity check\n if inlist is None or len(inlist) == 0:\n return []\n\n # Determine which form of input was provided:\n if isinstance(inlist, list):\n # python list\n flist = []\n for f in inlist:\n flist += irafglob(f)\n elif ',' in inlist:\n # comma-separated string list\n flist = []\n for f in inlist.split(','):\n f = f.strip()\n flist += irafglob(f)\n elif inlist[0] == '@':\n # file list\n flist = []\n for f in open(inlist[1:], 'r').readlines():\n f = f.rstrip()\n # hook for application specific atfiles.\n if atfile:\n f = atfile(f)\n flist += irafglob(f)\n else:\n # shell globbing\n if osfn:\n inlist = osfn(inlist)\n flist = glob.glob(inlist)\n\n return flist",
"def find(i):\n \"\"\"\n Input: {\n (repo_uoa) - repo UOA\n module_uoa - module UOA\n data_uoa - data UOA\n }\n\n Output: { \n Output of the 'load' function \n\n number_of_entries - total number of found entries\n }\n \"\"\"\n\n o=i.get('out','')\n i['out']=''\n\n # Check wildcards\n lst=[]\n\n a=i.get('repo_uoa','')\n m=i.get('module_uoa','')\n duoa=i.get('data_uoa','')\n\n if m=='':\n return {'return':1, 'error':'module UOA is not defined'}\n if duoa=='':\n return {'return':1, 'error':'data UOA is not defined'}\n\n if a.find('*')>=0 or a.find('?')>=0 or m.find('*')>=0 or m.find('?')>=0 or duoa.find('*')>=0 or duoa.find('?')>=0: \n r=list_data({'repo_uoa':a, 'module_uoa':m, 'data_uoa':duoa})\n if r['return']>0: return r\n\n lst=r['lst']\n\n r={'return':0}\n\n if len(lst)>0:\n r.update(lst[0])\n else:\n return {'return':1, 'error':'entry was not found'}\n\n else:\n # Find path to data\n r=find_path_to_data(i)\n if r['return']>0: return r\n\n p=r['path']\n ruoa=r.get('repo_uoa','')\n ruid=r.get('repo_uid','')\n muoa=r.get('module_uoa','')\n muid=r.get('module_uid','')\n duid=r.get('data_uid','')\n duoa=r.get('data_alias','')\n if duoa=='': duoa=duid\n\n lst.append({'path':p, 'repo_uoa':ruoa, 'repo_uid':ruid, \n 'module_uoa':muoa, 'module_uid':muid, \n 'data_uoa':duoa, 'data_uid': duid})\n\n if o=='con':\n pf='' \n for q in lst:\n p=q['path']\n out(p)\n if pf=='': pf=p\n\n i['out']=o\n\n r['number_of_entries']=len(lst)\n\n return r"
] |
[
0.7105668187141418,
0.6876538395881653,
0.6679221391677856,
0.6674149036407471,
0.6648903489112854,
0.664292573928833,
0.6628639101982117,
0.6617603898048401,
0.6605300307273865,
0.660112202167511,
0.6600364446640015,
0.6591923236846924
] |
Rewrite paths in config files to match convention job_xxxx/symlink
Requires path to run_xxxx/input/config_file and a list of paths_to_replace
|
def _rewrite_paths_in_file(config_file, paths_to_replace):
"""
Rewrite paths in config files to match convention job_xxxx/symlink
Requires path to run_xxxx/input/config_file and a list of paths_to_replace
"""
lines = []
# make a copy of config
import shutil
shutil.copyfile(config_file, str(config_file + '_original'))
with open(config_file) as infile:
for line in infile:
for old_path in paths_to_replace:
if old_path in line:
new_path = os.path.split(old_path)[-1]
line = line.replace(old_path, new_path)
logger.debug("Changed path {0} ---> {1} in file {2}".format(old_path, new_path, config_file))
lines.append(line)
with open(config_file, 'w') as outfile:
for line in lines:
outfile.write(line)
|
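A small sketch of the rewrite rule applied above: every known path occurring in a config line is replaced by its basename so it resolves inside the job_xxxx/symlink directory. The config line and path below are hypothetical:

import os

paths_to_replace = ["/home/user/project/detector.geo"]  # hypothetical external file
line = "some_card   /home/user/project/detector.geo\n"  # hypothetical config file line
for old_path in paths_to_replace:
    if old_path in line:
        line = line.replace(old_path, os.path.split(old_path)[-1])
print(line)  # "some_card   detector.geo"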
[
"def rewrite_paths(self, local_path, remote_path):\n \"\"\"\n Rewrite references to `local_path` with `remote_path` in job inputs.\n \"\"\"\n self.__rewrite_command_line(local_path, remote_path)\n self.__rewrite_config_files(local_path, remote_path)",
"def rewrite_input_paths(self):\n \"\"\"\n For each file that has been transferred and renamed, updated\n command_line and configfiles to reflect that rewrite.\n \"\"\"\n for local_path, remote_path in self.file_renames.items():\n self.job_inputs.rewrite_paths(local_path, remote_path)",
"def replace(old_value, new_value, full_match=False):\n '''\n Replace string or full line matches in switch's running config\n\n If full_match is set to True, then the whole line will need to be matched\n as part of the old value.\n\n .. code-block:: bash\n\n salt '*' onyx.cmd replace 'TESTSTRINGHERE' 'NEWTESTSTRINGHERE'\n '''\n if full_match is False:\n matcher = re.compile('^.*{0}.*$'.format(re.escape(old_value)), re.MULTILINE)\n repl = re.compile(re.escape(old_value))\n else:\n matcher = re.compile(old_value, re.MULTILINE)\n repl = re.compile(old_value)\n\n lines = {'old': [], 'new': []}\n for line in matcher.finditer(show_run()):\n lines['old'].append(line.group(0))\n lines['new'].append(repl.sub(new_value, line.group(0)))\n\n delete_config(lines['old'])\n add_config(lines['new'])\n\n return lines",
"def fix_paths(job):\n \"\"\"\n Coerce input arguments to use temporary files when used for output.\n\n Return a list of temporary file pairs (tmpfile, destination path) and\n a list of arguments.\n\n Converts each HdfsTarget to a string for the path.\n \"\"\"\n tmp_files = []\n args = []\n for x in job.args():\n if isinstance(x, luigi.contrib.hdfs.HdfsTarget): # input/output\n if x.exists() or not job.atomic_output(): # input\n args.append(x.path)\n else: # output\n x_path_no_slash = x.path[:-1] if x.path[-1] == '/' else x.path\n y = luigi.contrib.hdfs.HdfsTarget(x_path_no_slash + '-luigi-tmp-%09d' % random.randrange(0, 1e10))\n tmp_files.append((y, x_path_no_slash))\n logger.info('Using temp path: %s for path %s', y.path, x.path)\n args.append(y.path)\n else:\n try:\n # hopefully the target has a path to use\n args.append(x.path)\n except AttributeError:\n # if there's no path then hope converting it to a string will work\n args.append(str(x))\n\n return (tmp_files, args)",
"def replace(name, repl, full_match=False):\n '''\n Replace all instances of a string or full line in the running config\n\n name\n String to replace\n\n repl\n The replacement text\n\n full_match\n Whether `name` will match the full line or only a subset of the line.\n Defaults to False. When False, .* is added around `name` for matching\n in the `show run` config.\n\n Examples:\n\n .. code-block:: yaml\n\n replace snmp string:\n onyx.replace:\n - name: randoSNMPstringHERE\n - repl: NEWrandoSNMPstringHERE\n\n replace full snmp string:\n onyx.replace:\n - name: ^snmp-server community randoSNMPstringHERE group network-operator$\n - repl: snmp-server community NEWrandoSNMPstringHERE group network-operator\n - full_match: True\n\n .. note::\n The first example will replace the SNMP string on both the group and\n the ACL, so you will not lose the ACL setting. Because the second is\n an exact match of the line, when the group is removed, the ACL is\n removed, but not readded, because it was not matched.\n\n '''\n ret = {'name': name,\n 'result': False,\n 'changes': {},\n 'comment': ''}\n\n if full_match is False:\n search = '^.*{0}.*$'.format(name)\n else:\n search = name\n\n matches = __salt__['onyx.cmd']('find', search)\n\n if not matches:\n ret['result'] = True\n ret['comment'] = 'Nothing found to replace'\n return ret\n\n if __opts__['test'] is True:\n ret['result'] = None\n ret['comment'] = 'Configs will be changed'\n ret['changes']['old'] = matches\n ret['changes']['new'] = [re.sub(name, repl, match) for match in matches]\n return ret\n\n ret['changes'] = __salt__['onyx.cmd']('replace', name, repl, full_match=full_match)\n\n matches = __salt__['onyx.cmd']('find', search)\n\n if matches:\n ret['result'] = False\n ret['comment'] = 'Failed to replace all instances of \"{0}\"'.format(name)\n else:\n ret['result'] = True\n ret['comment'] = 'Successfully replaced all instances of \"{0}\" with \"{1}\"'.format(name, repl)\n\n return ret",
"def set_input_path(input_job)\n job_name = input_job.job_name\n input_filter = job_config['input_filter']\n\n s3_path = \"/data/1-parse/#{job_name}/segments/*/#{input_filter}\"\n build_s3_uri(s3_path)\n end",
"def job_config_path\n user_override = Janky.jobs_config_dir.join(\"#{job_template.downcase}.xml.erb\") if job_template\n custom = Janky.jobs_config_dir.join(\"#{name.downcase}.xml.erb\")\n default = Janky.jobs_config_dir.join(\"default.xml.erb\")\n\n if user_override && user_override.readable?\n user_override\n elsif custom.readable?\n custom\n elsif default.readable?\n default\n else\n raise Error, \"no config.xml.erb template for repo #{id.inspect}\"\n end\n end",
"def modify_jsonyaml_paths(jsonyaml_file):\n \"\"\"\n Changes relative paths in a json/yaml file to be relative\n to where the json/yaml file is located.\n\n :param jsonyaml_file: Path to a json/yaml file.\n \"\"\"\n loader = schema_salad.ref_resolver.Loader({\n \"location\": {\"@type\": \"@id\"},\n \"path\": {\"@type\": \"@id\"}\n })\n input_dict, _ = loader.resolve_ref(jsonyaml_file, checklinks=False)\n basedir = os.path.dirname(jsonyaml_file)\n\n def fixpaths(d):\n \"\"\"Make sure all paths have a URI scheme.\"\"\"\n if isinstance(d, dict):\n if \"path\" in d:\n if \":\" not in d[\"path\"]:\n local_path = os.path.normpath(os.path.join(os.getcwd(), basedir, d[\"path\"]))\n d[\"location\"] = pathname2url(local_path)\n else:\n d[\"location\"] = d[\"path\"]\n del d[\"path\"]\n\n visit(input_dict, fixpaths)\n return json.dumps(input_dict)",
"def symlink_configs\n paths = Dir.glob(\"#{shared_path}/config/**/*\").\n select {|p| File.file?(p) }\n paths.each do |src|\n relative_path = src.gsub(%r{.*config/},'config/')\n dest = \"#{release_path}/#{relative_path}\"\n # make sure the directory exist for symlink creation\n dirname = File.dirname(dest)\n FileUtils.mkdir_p(dirname) unless File.exist?(dirname)\n FileUtils.rm_rf(dest) if File.exist?(dest)\n FileUtils.ln_s(src,dest)\n end\n end",
"def replace_pattern(name,\n pattern,\n repl,\n count=0,\n flags=8,\n bufsize=1,\n append_if_not_found=False,\n prepend_if_not_found=False,\n not_found_content=None,\n search_only=False,\n show_changes=True,\n backslash_literal=False,\n source='running',\n path=None,\n test=False,\n replace=True,\n debug=False,\n commit=True):\n '''\n .. versionadded:: 2019.2.0\n\n Replace occurrences of a pattern in the configuration source. If\n ``show_changes`` is ``True``, then a diff of what changed will be returned,\n otherwise a ``True`` will be returned when changes are made, and ``False``\n when no changes are made.\n This is a pure Python implementation that wraps Python's :py:func:`~re.sub`.\n\n pattern\n A regular expression, to be matched using Python's\n :py:func:`~re.search`.\n\n repl\n The replacement text.\n\n count: ``0``\n Maximum number of pattern occurrences to be replaced. If count is a\n positive integer ``n``, only ``n`` occurrences will be replaced,\n otherwise all occurrences will be replaced.\n\n flags (list or int): ``8``\n A list of flags defined in the ``re`` module documentation from the\n Python standard library. Each list item should be a string that will\n correlate to the human-friendly flag name. E.g., ``['IGNORECASE',\n 'MULTILINE']``. Optionally, ``flags`` may be an int, with a value\n corresponding to the XOR (``|``) of all the desired flags. Defaults to\n 8 (which supports 'MULTILINE').\n\n bufsize (int or str): ``1``\n How much of the configuration to buffer into memory at once. The\n default value ``1`` processes one line at a time. The special value\n ``file`` may be specified which will read the entire file into memory\n before processing.\n\n append_if_not_found: ``False``\n If set to ``True``, and pattern is not found, then the content will be\n appended to the file.\n\n prepend_if_not_found: ``False``\n If set to ``True`` and pattern is not found, then the content will be\n prepended to the file.\n\n not_found_content\n Content to use for append/prepend if not found. If None (default), uses\n ``repl``. Useful when ``repl`` uses references to group in pattern.\n\n search_only: ``False``\n If set to true, this no changes will be performed on the file, and this\n function will simply return ``True`` if the pattern was matched, and\n ``False`` if not.\n\n show_changes: ``True``\n If ``True``, return a diff of changes made. Otherwise, return ``True``\n if changes were made, and ``False`` if not.\n\n backslash_literal: ``False``\n Interpret backslashes as literal backslashes for the repl and not\n escape characters. This will help when using append/prepend so that\n the backslashes are not interpreted for the repl on the second run of\n the state.\n\n source: ``running``\n The configuration source. Choose from: ``running``, ``candidate``, or\n ``startup``. Default: ``running``.\n\n path\n Save the temporary configuration to a specific path, then read from\n there.\n\n test: ``False``\n Dry run? If set as ``True``, will apply the config, discard and return\n the changes. Default: ``False`` and will commit the changes on the\n device.\n\n commit: ``True``\n Commit the configuration changes? Default: ``True``.\n\n debug: ``False``\n Debug mode. Will insert a new key in the output dictionary, as\n ``loaded_config`` containing the raw configuration loaded on the device.\n\n replace: ``True``\n Load and replace the configuration. 
Default: ``True``.\n\n If an equal sign (``=``) appears in an argument to a Salt command it is\n interpreted as a keyword argument in the format ``key=val``. That\n processing can be bypassed in order to pass an equal sign through to the\n remote shell command by manually specifying the kwarg:\n\n State SLS Example:\n\n .. code-block:: yaml\n\n update_policy_name:\n netconfig.replace_pattern:\n - pattern: OLD-POLICY-NAME\n - repl: new-policy-name\n - debug: true\n '''\n ret = salt.utils.napalm.default_ret(name)\n # the user can override the flags the equivalent CLI args\n # which have higher precedence\n test = __salt__['config.merge']('test', test)\n debug = __salt__['config.merge']('debug', debug)\n commit = __salt__['config.merge']('commit', commit)\n replace = __salt__['config.merge']('replace', replace) # this might be a bit risky\n replace_ret = __salt__['net.replace_pattern'](pattern,\n repl,\n count=count,\n flags=flags,\n bufsize=bufsize,\n append_if_not_found=append_if_not_found,\n prepend_if_not_found=prepend_if_not_found,\n not_found_content=not_found_content,\n search_only=search_only,\n show_changes=show_changes,\n backslash_literal=backslash_literal,\n source=source,\n path=path,\n test=test,\n replace=replace,\n debug=debug,\n commit=commit)\n return salt.utils.napalm.loaded_ret(ret, replace_ret, test, debug)",
"def replace_pattern(pattern,\n repl,\n count=0,\n flags=8,\n bufsize=1,\n append_if_not_found=False,\n prepend_if_not_found=False,\n not_found_content=None,\n search_only=False,\n show_changes=True,\n backslash_literal=False,\n source=None,\n path=None,\n test=False,\n replace=True,\n debug=False,\n commit=True):\n '''\n .. versionadded:: 2019.2.0\n\n Replace occurrences of a pattern in the configuration source. If\n ``show_changes`` is ``True``, then a diff of what changed will be returned,\n otherwise a ``True`` will be returned when changes are made, and ``False``\n when no changes are made.\n This is a pure Python implementation that wraps Python's :py:func:`~re.sub`.\n\n pattern\n A regular expression, to be matched using Python's\n :py:func:`~re.search`.\n\n repl\n The replacement text.\n\n count: ``0``\n Maximum number of pattern occurrences to be replaced. If count is a\n positive integer ``n``, only ``n`` occurrences will be replaced,\n otherwise all occurrences will be replaced.\n\n flags (list or int): ``8``\n A list of flags defined in the ``re`` module documentation from the\n Python standard library. Each list item should be a string that will\n correlate to the human-friendly flag name. E.g., ``['IGNORECASE',\n 'MULTILINE']``. Optionally, ``flags`` may be an int, with a value\n corresponding to the XOR (``|``) of all the desired flags. Defaults to\n 8 (which supports 'MULTILINE').\n\n bufsize (int or str): ``1``\n How much of the configuration to buffer into memory at once. The\n default value ``1`` processes one line at a time. The special value\n ``file`` may be specified which will read the entire file into memory\n before processing.\n\n append_if_not_found: ``False``\n If set to ``True``, and pattern is not found, then the content will be\n appended to the file.\n\n prepend_if_not_found: ``False``\n If set to ``True`` and pattern is not found, then the content will be\n prepended to the file.\n\n not_found_content\n Content to use for append/prepend if not found. If None (default), uses\n ``repl``. Useful when ``repl`` uses references to group in pattern.\n\n search_only: ``False``\n If set to true, this no changes will be performed on the file, and this\n function will simply return ``True`` if the pattern was matched, and\n ``False`` if not.\n\n show_changes: ``True``\n If ``True``, return a diff of changes made. Otherwise, return ``True``\n if changes were made, and ``False`` if not.\n\n backslash_literal: ``False``\n Interpret backslashes as literal backslashes for the repl and not\n escape characters. This will help when using append/prepend so that\n the backslashes are not interpreted for the repl on the second run of\n the state.\n\n source: ``running``\n The configuration source. Choose from: ``running``, ``candidate``, or\n ``startup``. Default: ``running``.\n\n path\n Save the temporary configuration to a specific path, then read from\n there.\n\n test: ``False``\n Dry run? If set as ``True``, will apply the config, discard and return\n the changes. Default: ``False`` and will commit the changes on the\n device.\n\n commit: ``True``\n Commit the configuration changes? Default: ``True``.\n\n debug: ``False``\n Debug mode. Will insert a new key in the output dictionary, as\n ``loaded_config`` containing the raw configuration loaded on the device.\n\n replace: ``True``\n Load and replace the configuration. Default: ``True``.\n\n If an equal sign (``=``) appears in an argument to a Salt command it is\n interpreted as a keyword argument in the format ``key=val``. 
That\n processing can be bypassed in order to pass an equal sign through to the\n remote shell command by manually specifying the kwarg:\n\n .. code-block:: bash\n\n salt '*' net.replace_pattern \"bind-address\\\\s*=\" \"bind-address:\"\n\n CLI Example:\n\n .. code-block:: bash\n\n salt '*' net.replace_pattern PREFIX-LIST_NAME new-prefix-list-name\n salt '*' net.replace_pattern bgp-group-name new-bgp-group-name count=1\n '''\n config_saved = save_config(source=source, path=path)\n if not config_saved or not config_saved['result']:\n return config_saved\n path = config_saved['out']\n replace_pattern = __salt__['file.replace'](path,\n pattern,\n repl,\n count=count,\n flags=flags,\n bufsize=bufsize,\n append_if_not_found=append_if_not_found,\n prepend_if_not_found=prepend_if_not_found,\n not_found_content=not_found_content,\n search_only=search_only,\n show_changes=show_changes,\n backslash_literal=backslash_literal)\n with salt.utils.files.fopen(path, 'r') as fh_:\n updated_config = fh_.read()\n return __salt__['net.load_config'](text=updated_config,\n test=test,\n debug=debug,\n replace=replace,\n commit=commit)",
"def replace(path,\n pattern,\n repl,\n count=0,\n flags=8,\n bufsize=1,\n append_if_not_found=False,\n prepend_if_not_found=False,\n not_found_content=None,\n backup='.bak',\n dry_run=False,\n search_only=False,\n show_changes=True,\n ignore_if_missing=False,\n preserve_inode=True,\n backslash_literal=False,\n ):\n '''\n .. versionadded:: 0.17.0\n\n Replace occurrences of a pattern in a file. If ``show_changes`` is\n ``True``, then a diff of what changed will be returned, otherwise a\n ``True`` will be returned when changes are made, and ``False`` when\n no changes are made.\n\n This is a pure Python implementation that wraps Python's :py:func:`~re.sub`.\n\n path\n Filesystem path to the file to be edited. If a symlink is specified, it\n will be resolved to its target.\n\n pattern\n A regular expression, to be matched using Python's\n :py:func:`~re.search`.\n\n repl\n The replacement text\n\n count : 0\n Maximum number of pattern occurrences to be replaced. If count is a\n positive integer ``n``, only ``n`` occurrences will be replaced,\n otherwise all occurrences will be replaced.\n\n flags (list or int)\n A list of flags defined in the ``re`` module documentation from the\n Python standard library. Each list item should be a string that will\n correlate to the human-friendly flag name. E.g., ``['IGNORECASE',\n 'MULTILINE']``. Optionally, ``flags`` may be an int, with a value\n corresponding to the XOR (``|``) of all the desired flags. Defaults to\n 8 (which supports 'MULTILINE').\n\n bufsize (int or str)\n How much of the file to buffer into memory at once. The\n default value ``1`` processes one line at a time. The special value\n ``file`` may be specified which will read the entire file into memory\n before processing.\n\n append_if_not_found : False\n .. versionadded:: 2014.7.0\n\n If set to ``True``, and pattern is not found, then the content will be\n appended to the file.\n\n prepend_if_not_found : False\n .. versionadded:: 2014.7.0\n\n If set to ``True`` and pattern is not found, then the content will be\n prepended to the file.\n\n not_found_content\n .. versionadded:: 2014.7.0\n\n Content to use for append/prepend if not found. If None (default), uses\n ``repl``. Useful when ``repl`` uses references to group in pattern.\n\n backup : .bak\n The file extension to use for a backup of the file before editing. Set\n to ``False`` to skip making a backup.\n\n dry_run : False\n If set to ``True``, no changes will be made to the file, the function\n will just return the changes that would have been made (or a\n ``True``/``False`` value if ``show_changes`` is set to ``False``).\n\n search_only : False\n If set to true, this no changes will be performed on the file, and this\n function will simply return ``True`` if the pattern was matched, and\n ``False`` if not.\n\n show_changes : True\n If ``True``, return a diff of changes made. Otherwise, return ``True``\n if changes were made, and ``False`` if not.\n\n .. note::\n Using this option will store two copies of the file in memory (the\n original version and the edited version) in order to generate the\n diff. This may not normally be a concern, but could impact\n performance if used with large files.\n\n ignore_if_missing : False\n .. versionadded:: 2015.8.0\n\n If set to ``True``, this function will simply return ``False``\n if the file doesn't exist. Otherwise, an error will be thrown.\n\n preserve_inode : True\n .. 
versionadded:: 2015.8.0\n\n Preserve the inode of the file, so that any hard links continue to\n share the inode with the original filename. This works by *copying* the\n file, reading from the copy, and writing to the file at the original\n inode. If ``False``, the file will be *moved* rather than copied, and a\n new file will be written to a new inode, but using the original\n filename. Hard links will then share an inode with the backup, instead\n (if using ``backup`` to create a backup copy).\n\n backslash_literal : False\n .. versionadded:: 2016.11.7\n\n Interpret backslashes as literal backslashes for the repl and not\n escape characters. This will help when using append/prepend so that\n the backslashes are not interpreted for the repl on the second run of\n the state.\n\n If an equal sign (``=``) appears in an argument to a Salt command it is\n interpreted as a keyword argument in the format ``key=val``. That\n processing can be bypassed in order to pass an equal sign through to the\n remote shell command by manually specifying the kwarg:\n\n .. code-block:: bash\n\n salt '*' file.replace /path/to/file pattern='=' repl=':'\n salt '*' file.replace /path/to/file pattern=\"bind-address\\\\s*=\" repl='bind-address:'\n\n CLI Examples:\n\n .. code-block:: bash\n\n salt '*' file.replace /etc/httpd/httpd.conf pattern='LogLevel warn' repl='LogLevel info'\n salt '*' file.replace /some/file pattern='before' repl='after' flags='[MULTILINE, IGNORECASE]'\n '''\n symlink = False\n if is_link(path):\n symlink = True\n target_path = os.readlink(path)\n given_path = os.path.expanduser(path)\n\n path = os.path.realpath(os.path.expanduser(path))\n\n if not os.path.exists(path):\n if ignore_if_missing:\n return False\n else:\n raise SaltInvocationError('File not found: {0}'.format(path))\n\n if not __utils__['files.is_text'](path):\n raise SaltInvocationError(\n 'Cannot perform string replacements on a binary file: {0}'\n .format(path)\n )\n\n if search_only and (append_if_not_found or prepend_if_not_found):\n raise SaltInvocationError(\n 'search_only cannot be used with append/prepend_if_not_found'\n )\n\n if append_if_not_found and prepend_if_not_found:\n raise SaltInvocationError(\n 'Only one of append and prepend_if_not_found is permitted'\n )\n\n flags_num = _get_flags(flags)\n cpattern = re.compile(salt.utils.stringutils.to_bytes(pattern), flags_num)\n filesize = os.path.getsize(path)\n if bufsize == 'file':\n bufsize = filesize\n\n # Search the file; track if any changes have been made for the return val\n has_changes = False\n orig_file = [] # used for show_changes and change detection\n new_file = [] # used for show_changes and change detection\n if not salt.utils.platform.is_windows():\n pre_user = get_user(path)\n pre_group = get_group(path)\n pre_mode = salt.utils.files.normalize_mode(get_mode(path))\n\n # Avoid TypeErrors by forcing repl to be bytearray related to mmap\n # Replacement text may contains integer: 123 for example\n repl = salt.utils.stringutils.to_bytes(six.text_type(repl))\n if not_found_content:\n not_found_content = salt.utils.stringutils.to_bytes(not_found_content)\n\n found = False\n temp_file = None\n content = salt.utils.stringutils.to_unicode(not_found_content) \\\n if not_found_content and (prepend_if_not_found or append_if_not_found) \\\n else salt.utils.stringutils.to_unicode(repl)\n\n try:\n # First check the whole file, determine whether to make the replacement\n # Searching first avoids modifying the time stamp if there are no changes\n r_data = None\n # Use a 
read-only handle to open the file\n with salt.utils.files.fopen(path,\n mode='rb',\n buffering=bufsize) as r_file:\n try:\n # mmap throws a ValueError if the file is empty.\n r_data = mmap.mmap(r_file.fileno(),\n 0,\n access=mmap.ACCESS_READ)\n except (ValueError, mmap.error):\n # size of file in /proc is 0, but contains data\n r_data = salt.utils.stringutils.to_bytes(\"\".join(r_file))\n if search_only:\n # Just search; bail as early as a match is found\n if re.search(cpattern, r_data):\n return True # `with` block handles file closure\n else:\n return False\n else:\n result, nrepl = re.subn(cpattern,\n repl.replace('\\\\', '\\\\\\\\') if backslash_literal else repl,\n r_data,\n count)\n\n # found anything? (even if no change)\n if nrepl > 0:\n found = True\n # Identity check the potential change\n has_changes = True if pattern != repl else has_changes\n\n if prepend_if_not_found or append_if_not_found:\n # Search for content, to avoid pre/appending the\n # content if it was pre/appended in a previous run.\n if re.search(salt.utils.stringutils.to_bytes('^{0}($|(?=\\r\\n))'.format(re.escape(content))),\n r_data,\n flags=flags_num):\n # Content was found, so set found.\n found = True\n\n orig_file = r_data.read(filesize).splitlines(True) \\\n if isinstance(r_data, mmap.mmap) \\\n else r_data.splitlines(True)\n new_file = result.splitlines(True)\n\n except (OSError, IOError) as exc:\n raise CommandExecutionError(\n \"Unable to open file '{0}'. \"\n \"Exception: {1}\".format(path, exc)\n )\n finally:\n if r_data and isinstance(r_data, mmap.mmap):\n r_data.close()\n\n if has_changes and not dry_run:\n # Write the replacement text in this block.\n try:\n # Create a copy to read from and to use as a backup later\n temp_file = _mkstemp_copy(path=path,\n preserve_inode=preserve_inode)\n except (OSError, IOError) as exc:\n raise CommandExecutionError(\"Exception: {0}\".format(exc))\n\n r_data = None\n try:\n # Open the file in write mode\n with salt.utils.files.fopen(path,\n mode='w',\n buffering=bufsize) as w_file:\n try:\n # Open the temp file in read mode\n with salt.utils.files.fopen(temp_file,\n mode='r',\n buffering=bufsize) as r_file:\n r_data = mmap.mmap(r_file.fileno(),\n 0,\n access=mmap.ACCESS_READ)\n result, nrepl = re.subn(cpattern,\n repl.replace('\\\\', '\\\\\\\\') if backslash_literal else repl,\n r_data,\n count)\n try:\n w_file.write(salt.utils.stringutils.to_str(result))\n except (OSError, IOError) as exc:\n raise CommandExecutionError(\n \"Unable to write file '{0}'. Contents may \"\n \"be truncated. Temporary file contains copy \"\n \"at '{1}'. 
\"\n \"Exception: {2}\".format(path, temp_file, exc)\n )\n except (OSError, IOError) as exc:\n raise CommandExecutionError(\"Exception: {0}\".format(exc))\n finally:\n if r_data and isinstance(r_data, mmap.mmap):\n r_data.close()\n except (OSError, IOError) as exc:\n raise CommandExecutionError(\"Exception: {0}\".format(exc))\n\n if not found and (append_if_not_found or prepend_if_not_found):\n if not_found_content is None:\n not_found_content = repl\n if prepend_if_not_found:\n new_file.insert(0, not_found_content + salt.utils.stringutils.to_bytes(os.linesep))\n else:\n # append_if_not_found\n # Make sure we have a newline at the end of the file\n if new_file:\n if not new_file[-1].endswith(salt.utils.stringutils.to_bytes(os.linesep)):\n new_file[-1] += salt.utils.stringutils.to_bytes(os.linesep)\n new_file.append(not_found_content + salt.utils.stringutils.to_bytes(os.linesep))\n has_changes = True\n if not dry_run:\n try:\n # Create a copy to read from and for later use as a backup\n temp_file = _mkstemp_copy(path=path,\n preserve_inode=preserve_inode)\n except (OSError, IOError) as exc:\n raise CommandExecutionError(\"Exception: {0}\".format(exc))\n # write new content in the file while avoiding partial reads\n try:\n fh_ = salt.utils.atomicfile.atomic_open(path, 'wb')\n for line in new_file:\n fh_.write(salt.utils.stringutils.to_bytes(line))\n finally:\n fh_.close()\n\n if backup and has_changes and not dry_run:\n # keep the backup only if it was requested\n # and only if there were any changes\n backup_name = '{0}{1}'.format(path, backup)\n try:\n shutil.move(temp_file, backup_name)\n except (OSError, IOError) as exc:\n raise CommandExecutionError(\n \"Unable to move the temp file '{0}' to the \"\n \"backup file '{1}'. \"\n \"Exception: {2}\".format(path, temp_file, exc)\n )\n if symlink:\n symlink_backup = '{0}{1}'.format(given_path, backup)\n target_backup = '{0}{1}'.format(target_path, backup)\n # Always clobber any existing symlink backup\n # to match the behaviour of the 'backup' option\n try:\n os.symlink(target_backup, symlink_backup)\n except OSError:\n os.remove(symlink_backup)\n os.symlink(target_backup, symlink_backup)\n except Exception:\n raise CommandExecutionError(\n \"Unable create backup symlink '{0}'. \"\n \"Target was '{1}'. \"\n \"Exception: {2}\".format(symlink_backup, target_backup,\n exc)\n )\n elif temp_file:\n try:\n os.remove(temp_file)\n except (OSError, IOError) as exc:\n raise CommandExecutionError(\n \"Unable to delete temp file '{0}'. \"\n \"Exception: {1}\".format(temp_file, exc)\n )\n\n if not dry_run and not salt.utils.platform.is_windows():\n check_perms(path, None, pre_user, pre_group, pre_mode)\n\n differences = __utils__['stringutils.get_diff'](orig_file, new_file)\n\n if show_changes:\n return differences\n\n # We may have found a regex line match but don't need to change the line\n # (for situations where the pattern also matches the repl). Revert the\n # has_changes flag to False if the final result is unchanged.\n if not differences:\n has_changes = False\n\n return has_changes"
] |
[
0.7911860346794128,
0.781268298625946,
0.6906705498695374,
0.690446138381958,
0.6899089813232422,
0.6825046539306641,
0.6709731817245483,
0.6641524434089661,
0.6566023826599121,
0.656368613243103,
0.655708372592926,
0.653624951839447
] |
Check if a given LDAP object exists.
|
def _check_exists(database: Database, table: LdapObjectClass, key: str, value: str):
""" Check if a given LDAP object exists. """
try:
get_one(table, Q(**{key: value}), database=database)
return True
except ObjectDoesNotExist:
return False
|
[
"public boolean exists(Dn dn) {\n try {\n return connection.exists(dn);\n } catch (LdapException e) {\n throw new LdapDaoException(e);\n }\n }",
"private void existsCheck(Dn dn) throws NoSuchNodeException, MissingParentException {\n try {\n if (!connection.exists(dn.getParent())) {\n throw new MissingParentException(lastMatch(dn));\n } else if (!connection.exists(dn)) {\n throw new NoSuchNodeException(dn);\n }\n } catch (LdapException e) {\n throw new LdapDaoException(e);\n }\n }",
"def _search(self, base, fltr, attrs=None, scope=ldap.SCOPE_SUBTREE):\n \"\"\"Perform LDAP search\"\"\"\n try:\n results = self._conn.search_s(base, scope, fltr, attrs)\n except Exception as e:\n log.exception(self._get_ldap_msg(e))\n results = False\n return results",
"def exists(self):\n \"\"\":type: bool\n\n True when the object actually exists (and can be accessed by\n the current user) in Fedora\n \"\"\"\n\n # If we made the object under the pretext that it doesn't exist in\n # fedora yet, then assume it doesn't exist in fedora yet.\n if self._create:\n return False\n\n # If we can get a valid object profile, regardless of its contents,\n # then this object exists. If not, then it doesn't.\n try:\n self.getProfile()\n return True\n except RequestFailed:\n return False",
"def exists(self, **kwargs):\n \"\"\"Check for the existence of the named object on the BIG-IP\n\n Tries to `load()` the object and if it fails checks the exception\n for 404. If the `load()` is successful it returns `True` if the\n exception is :exc:`requests.HTTPError` and the\n ``status_code`` is ``404``\n it will return error. All other errors are raised as is.\n\n :param kwargs: Keyword arguments required to get objects\n NOTE: If kwargs has a 'requests_params' key the corresponding dict will\n be passed to the underlying requests.session.get method where it will\n be handled according to that API. THIS IS HOW TO PASS QUERY-ARGS!\n :returns: bool -- The objects exists on BIG-IP® or not.\n :raises: :exc:`requests.HTTPError`, Any HTTP error that was not status\n code 404.\n \"\"\"\n\n requests_params = self._handle_requests_params(kwargs)\n self._check_load_parameters(**kwargs)\n load_uri = self._create_subpath_uri(kwargs)\n session = self._meta_data['bigip']._meta_data['icr_session']\n kwargs.update(requests_params)\n try:\n session.get(load_uri, **kwargs)\n except HTTPError as err:\n logger.debug(err.response.text)\n if err.response.status_code == 404:\n return False\n else:\n raise\n return True",
"def if_exists(self):\n \"\"\"\n Check the existence of an object before an update or delete.\n\n If the update or delete isn't applied, a LWTException is raised.\n \"\"\"\n if self.model._has_counter:\n raise IfExistsWithCounterColumn('if_exists cannot be used with tables containing counter columns')\n clone = copy.deepcopy(self)\n clone._if_exists = True\n return clone",
"def is_existing_object(did):\n \"\"\"Return True if PID is for an object for which science bytes are stored locally.\n\n This excludes SIDs and PIDs for unprocessed replica requests, remote or non-existing\n revisions of local replicas and objects aggregated in Resource Maps.\n\n \"\"\"\n return d1_gmn.app.models.ScienceObject.objects.filter(pid__did=did).exists()",
"def exists(name, tags=None, region=None, key=None, keyid=None, profile=None):\n '''\n Check to see if an RDS exists.\n\n CLI example::\n\n salt myminion boto_rds.exists myrds region=us-east-1\n '''\n conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)\n\n try:\n rds = conn.describe_db_instances(DBInstanceIdentifier=name)\n return {'exists': bool(rds)}\n except ClientError as e:\n return {'error': __utils__['boto3.get_error'](e)}",
"def exists(self, **kwargs):\n \"\"\"Check for the existence of the named object on the BigIP\n\n Sends an HTTP GET to the URI of the named object and if it fails with\n a :exc:~requests.HTTPError` exception it checks the exception for\n status code of 404 and returns :obj:`False` in that case.\n\n If the GET is successful it must then check the contents of the json\n contained in the response, this is because the \"pool/... /members\"\n resource provided by the server returns a status code of 200 for\n queries that do not correspond to an existing configuration. Therefore\n this method checks for the presence of the \"address\" key in the\n response JSON... of course, this means that exists depends on an\n unexpected idiosyncrancy of the server, and might break with version\n updates, edge cases, or other unpredictable changes.\n\n :param kwargs: Keyword arguments required to get objects, \"partition\"\n and \"name\" are required\n\n NOTE: If kwargs has a 'requests_params' key the corresponding dict will\n be passed to the underlying requests.session.get method where it will\n be handled according to that API. THIS IS HOW TO PASS QUERY-ARGS!\n :returns: bool -- The objects exists on BigIP or not.\n :raises: :exc:`requests.HTTPError`, Any HTTP error that was not status\n code 404.\n \"\"\"\n requests_params = self._handle_requests_params(kwargs)\n self._check_load_parameters(**kwargs)\n kwargs['uri_as_parts'] = True\n session = self._meta_data['bigip']._meta_data['icr_session']\n base_uri = self._meta_data['container']._meta_data['uri']\n kwargs.update(requests_params)\n try:\n response = session.get(base_uri, **kwargs)\n except HTTPError as err:\n if err.response.status_code == 404:\n return False\n else:\n raise\n rdict = response.json()\n if \"address\" not in rdict:\n # We can add 'or' conditions to be more restrictive.\n return False\n # Only after all conditions are met...\n return True",
"def get_ldap(cls, global_options=None):\n \"\"\"\n Returns the ldap module. The unit test harness will assign a mock object\n to _LDAPConfig.ldap. It is imperative that the ldap module not be\n imported anywhere else so that the unit tests will pass in the absence\n of python-ldap.\n \"\"\"\n if cls.ldap is None:\n import ldap.filter\n\n # Support for python-ldap < 2.0.6\n try:\n import ldap.dn\n except ImportError:\n from django_auth_ldap import dn\n ldap.dn = dn\n\n cls.ldap = ldap\n\n # Apply global LDAP options once\n if (not cls._ldap_configured) and (global_options is not None):\n for opt, value in global_options.items():\n cls.ldap.set_option(opt, value)\n\n cls._ldap_configured = True\n\n return cls.ldap",
"def isAuth(self):\n '''\n Indicates that object auth worked\n :return: True or False\n '''\n if isinstance(self.__conn, ldap.ldapobject.LDAPObject) or self.__conn:\n return True\n return False",
"async def exist(self, key, param=None):\n \"\"\"see if specific identity exists\"\"\"\n identity = self._gen_identity(key, param)\n return await self.client.exists(identity)"
] |
[
0.7273830771446228,
0.7265703678131104,
0.707835853099823,
0.7018440365791321,
0.6881299018859863,
0.6857613921165466,
0.6838850378990173,
0.683398425579071,
0.6821202039718628,
0.6818259954452515,
0.6811388731002808,
0.6797795295715332
] |
Modify a changeset to add an automatically generated uidNumber.
|
def save_account(changes: Changeset, table: LdapObjectClass, database: Database) -> Changeset:
""" Modify a changes to add an automatically generated uidNumber. """
d = {}
settings = database.settings
uid_number = changes.get_value_as_single('uidNumber')
if uid_number is None:
scheme = settings['NUMBER_SCHEME']
first = settings.get('UID_FIRST', 10000)
d['uidNumber'] = Counters.get_and_increment(
scheme, "uidNumber", first,
lambda n: not _check_exists(database, table, 'uidNumber', n)
)
changes = changes.merge(d)
return changes
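
# Minimal stand-in sketch for the Counters.get_and_increment() call above
# (assuming nothing about the real class): start at `first` and keep
# incrementing until the supplied availability check accepts the candidate.
def _first_free_number(first, is_free):
    n = first
    while not is_free(n):
        n += 1
    return n

taken = {10000, 10001}
print(_first_free_number(10000, lambda n: n not in taken))  # -> 10002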
|
[
"def _add_uid(self, uid, skip_handle=False):\n \"\"\"Add unique identifier in correct field.\n\n The ``skip_handle`` flag is used when adding a uid through the add_url function\n since urls can be easily confused with handle elements.\n \"\"\"\n # We might add None values from wherever. Kill them here.\n uid = uid or ''\n if is_arxiv(uid):\n self._ensure_reference_field('arxiv_eprint', normalize_arxiv(uid))\n elif idutils.is_doi(uid):\n self._ensure_reference_field('dois', [])\n self.obj['reference']['dois'].append(idutils.normalize_doi(uid))\n elif idutils.is_handle(uid) and not skip_handle:\n self._ensure_reference_field('persistent_identifiers', [])\n self.obj['reference']['persistent_identifiers'].append({\n 'schema': 'HDL',\n 'value': idutils.normalize_handle(uid),\n })\n elif idutils.is_urn(uid):\n self._ensure_reference_field('persistent_identifiers', [])\n self.obj['reference']['persistent_identifiers'].append({\n 'schema': 'URN',\n 'value': uid,\n })\n elif self.RE_VALID_CNUM.match(uid):\n self._ensure_reference_field('publication_info', {})\n self.obj['reference']['publication_info']['cnum'] = uid\n elif is_cds_url(uid):\n self._ensure_reference_field('external_system_identifiers', [])\n self.obj['reference']['external_system_identifiers'].append({\n 'schema': 'CDS',\n 'value': extract_cds_id(uid),\n })\n elif is_ads_url(uid):\n self._ensure_reference_field('external_system_identifiers', [])\n self.obj['reference']['external_system_identifiers'].append({\n 'schema': 'ADS',\n 'value': extract_ads_id(uid),\n })\n else:\n # ``idutils.is_isbn`` is too strict in what it accepts.\n try:\n isbn = str(ISBN(uid))\n self._ensure_reference_field('isbn', {})\n self.obj['reference']['isbn'] = isbn\n except Exception:\n raise ValueError('Unrecognized uid type')",
"public void renumberUniqueIDs()\n {\n int uid = firstUniqueID();\n for (T entity : this)\n {\n entity.setUniqueID(Integer.valueOf(uid++));\n }\n }",
"def replace_vobject(self, uid, ical, filename=None):\n \"\"\"Update the Remind command with the uid in the file with the new iCalendar\"\"\"\n if not filename:\n filename = self._filename\n elif filename not in self._reminders:\n return\n\n uid = uid.split('@')[0]\n\n with self._lock:\n rem = open(filename).readlines()\n for (index, line) in enumerate(rem):\n if uid == md5(line[:-1].encode('utf-8')).hexdigest():\n rem[index] = self.to_reminders(ical)\n new_uid = self._get_uid(rem[index])\n open(filename, 'w').writelines(rem)\n return new_uid",
"def to_modify(self, uid):\n '''\n Try to modify the page.\n '''\n\n kwd = {\n 'pager': '',\n\n }\n self.render('wiki_page/page_edit.html',\n postinfo=MWiki.get_by_uid(uid),\n kwd=kwd,\n cfg=CMS_CFG,\n userinfo=self.userinfo)",
"def replace_uid(old_uwnetid, new_uwnetid, no_custom_fields=True):\n \"\"\"\n Return a list of BridgeUser objects without custom fields\n \"\"\"\n url = author_uid_url(old_uwnetid)\n if not no_custom_fields:\n url += (\"?%s\" % CUSTOM_FIELD)\n resp = patch_resource(url, '{\"user\":{\"uid\":\"%s@uw.edu\"}}' % new_uwnetid)\n return _process_json_resp_data(resp,\n no_custom_fields=no_custom_fields)",
"def modify_user(username, locked=false, shell=nil)\n shell ||= DEFAULT_SHELLS.detect { |sh| File.exists?(sh) }\n\n usermod = find_sbin('usermod')\n\n if locked\n # the man page claims that \"1\" works here, but testing proves that it doesn't.\n # use 1970 instead.\n dash_e = \"-e 1970-01-01 -L\"\n else\n dash_e = \"-e 99999 -U\"\n end\n\n unless shell.nil?\n dash_s = \"-s #{Shellwords.escape(shell)}\"\n end\n\n result = sudo(\"#{usermod} #{dash_e} #{dash_s} #{Shellwords.escape(username)}\")\n\n case result.exitstatus\n when 0\n RightScale::Log.info(\"LoginUserManager modified #{username} successfully\")\n else\n RightScale::Log.error(\"LoginUserManager failed to modify #{username}\")\n end\n\n true\n end",
"def to_modify(self, uid):\n '''\n Try to edit the link.\n '''\n if self.userinfo.role[1] >= '3':\n pass\n else:\n return False\n\n self.render('misc/link/link_edit.html',\n kwd={},\n postinfo=MLink.get_by_uid(uid),\n userinfo=self.userinfo)",
"def _changes(name,\n uid=None,\n gid=None,\n groups=None,\n optional_groups=None,\n remove_groups=True,\n home=None,\n createhome=True,\n password=None,\n enforce_password=True,\n empty_password=False,\n shell=None,\n fullname='',\n roomnumber='',\n workphone='',\n homephone='',\n other='',\n loginclass=None,\n date=None,\n mindays=0,\n maxdays=999999,\n inactdays=0,\n warndays=7,\n expire=None,\n win_homedrive=None,\n win_profile=None,\n win_logonscript=None,\n win_description=None,\n allow_uid_change=False,\n allow_gid_change=False):\n '''\n Return a dict of the changes required for a user if the user is present,\n otherwise return False.\n\n Updated in 2015.8.0 to include support for windows homedrive, profile,\n logonscript, and description fields.\n\n Updated in 2014.7.0 to include support for shadow attributes, all\n attributes supported as integers only.\n '''\n\n if 'shadow.info' in __salt__:\n lshad = __salt__['shadow.info'](name)\n\n lusr = __salt__['user.info'](name)\n if not lusr:\n return False\n\n change = {}\n if groups is None:\n groups = lusr['groups']\n wanted_groups = sorted(set((groups or []) + (optional_groups or [])))\n if uid and lusr['uid'] != uid:\n change['uid'] = uid\n if gid is not None and lusr['gid'] not in (gid, __salt__['file.group_to_gid'](gid)):\n change['gid'] = gid\n default_grp = __salt__['file.gid_to_group'](\n gid if gid is not None else lusr['gid']\n )\n # remove the default group from the list for comparison purposes\n if default_grp in lusr['groups']:\n lusr['groups'].remove(default_grp)\n if name in lusr['groups'] and name not in wanted_groups:\n lusr['groups'].remove(name)\n # remove default group from wanted_groups, as this requirement is\n # already met\n if default_grp in wanted_groups:\n wanted_groups.remove(default_grp)\n if _group_changes(lusr['groups'], wanted_groups, remove_groups):\n change['groups'] = wanted_groups\n if home and lusr['home'] != home:\n change['home'] = home\n if createhome:\n newhome = home if home else lusr['home']\n if newhome is not None and not os.path.isdir(newhome):\n change['homeDoesNotExist'] = newhome\n if shell and lusr['shell'] != shell:\n change['shell'] = shell\n if 'shadow.info' in __salt__ and 'shadow.default_hash' in __salt__:\n if password and not empty_password:\n default_hash = __salt__['shadow.default_hash']()\n if lshad['passwd'] == default_hash \\\n or lshad['passwd'] != default_hash and enforce_password:\n if lshad['passwd'] != password:\n change['passwd'] = password\n if empty_password and lshad['passwd'] != '':\n change['empty_password'] = True\n if date is not None and lshad['lstchg'] != date:\n change['date'] = date\n if mindays is not None and lshad['min'] != mindays:\n change['mindays'] = mindays\n if maxdays is not None and lshad['max'] != maxdays:\n change['maxdays'] = maxdays\n if inactdays is not None and lshad['inact'] != inactdays:\n change['inactdays'] = inactdays\n if warndays is not None and lshad['warn'] != warndays:\n change['warndays'] = warndays\n if expire and lshad['expire'] != expire:\n change['expire'] = expire\n elif 'shadow.info' in __salt__ and salt.utils.platform.is_windows():\n if expire and expire is not -1 and salt.utils.dateutils.strftime(lshad['expire']) != salt.utils.dateutils.strftime(expire):\n change['expire'] = expire\n\n # GECOS fields\n fullname = salt.utils.data.decode(fullname)\n lusr['fullname'] = salt.utils.data.decode(lusr['fullname'])\n if fullname is not None and lusr['fullname'] != fullname:\n change['fullname'] = fullname\n if win_homedrive and 
lusr['homedrive'] != win_homedrive:\n change['homedrive'] = win_homedrive\n if win_profile and lusr['profile'] != win_profile:\n change['profile'] = win_profile\n if win_logonscript and lusr['logonscript'] != win_logonscript:\n change['logonscript'] = win_logonscript\n if win_description and lusr['description'] != win_description:\n change['description'] = win_description\n\n # MacOS doesn't have full GECOS support, so check for the \"ch\" functions\n # and ignore these parameters if these functions do not exist.\n if 'user.chroomnumber' in __salt__ \\\n and roomnumber is not None:\n roomnumber = salt.utils.data.decode(roomnumber)\n lusr['roomnumber'] = salt.utils.data.decode(lusr['roomnumber'])\n if lusr['roomnumber'] != roomnumber:\n change['roomnumber'] = roomnumber\n if 'user.chworkphone' in __salt__ \\\n and workphone is not None:\n workphone = salt.utils.data.decode(workphone)\n lusr['workphone'] = salt.utils.data.decode(lusr['workphone'])\n if lusr['workphone'] != workphone:\n change['workphone'] = workphone\n if 'user.chhomephone' in __salt__ \\\n and homephone is not None:\n homephone = salt.utils.data.decode(homephone)\n lusr['homephone'] = salt.utils.data.decode(lusr['homephone'])\n if lusr['homephone'] != homephone:\n change['homephone'] = homephone\n if 'user.chother' in __salt__ and other is not None:\n other = salt.utils.data.decode(other)\n lusr['other'] = salt.utils.data.decode(lusr['other'])\n if lusr['other'] != other:\n change['other'] = other\n # OpenBSD/FreeBSD login class\n if __grains__['kernel'] in ('OpenBSD', 'FreeBSD'):\n if loginclass:\n if __salt__['user.get_loginclass'](name) != loginclass:\n change['loginclass'] = loginclass\n\n errors = []\n if not allow_uid_change and 'uid' in change:\n errors.append(\n 'Changing uid ({0} -> {1}) not permitted, set allow_uid_change to '\n 'True to force this change. Note that this will not change file '\n 'ownership.'.format(lusr['uid'], uid)\n )\n if not allow_gid_change and 'gid' in change:\n errors.append(\n 'Changing gid ({0} -> {1}) not permitted, set allow_gid_change to '\n 'True to force this change. Note that this will not change file '\n 'ownership.'.format(lusr['gid'], gid)\n )\n if errors:\n raise CommandExecutionError(\n 'Encountered error checking for needed changes',\n info=errors\n )\n\n return change",
"def modify_meta(uid, data_dic, extinfo=None):\n '''\n update meta of the rec.\n '''\n if extinfo is None:\n extinfo = {}\n title = data_dic['title'].strip()\n if len(title) < 2:\n return False\n\n cur_info = MPost.get_by_uid(uid)\n if cur_info:\n # ToDo: should not do this. Not for 's'\n if DB_CFG['kind'] == 's':\n entry = TabPost.update(\n title=title,\n user_name=data_dic['user_name'],\n keywords='',\n time_update=tools.timestamp(),\n date=datetime.now(),\n cnt_md=data_dic['cnt_md'],\n memo=data_dic['memo'] if 'memo' in data_dic else '',\n logo=data_dic['logo'],\n order=data_dic['order'],\n cnt_html=tools.markdown2html(data_dic['cnt_md']),\n valid=data_dic['valid']\n ).where(TabPost.uid == uid)\n entry.execute()\n else:\n cur_extinfo = cur_info.extinfo\n # Update the extinfo, Not replace\n for key in extinfo:\n cur_extinfo[key] = extinfo[key]\n\n entry = TabPost.update(\n title=title,\n user_name=data_dic['user_name'],\n keywords='',\n time_update=tools.timestamp(),\n date=datetime.now(),\n cnt_md=data_dic['cnt_md'],\n memo=data_dic['memo'] if 'memo' in data_dic else '',\n logo=data_dic['logo'],\n order=data_dic['order'] if 'order' in data_dic else '',\n cnt_html=tools.markdown2html(data_dic['cnt_md']),\n extinfo=cur_extinfo,\n valid=data_dic['valid']\n ).where(TabPost.uid == uid)\n entry.execute()\n else:\n return MPost.add_meta(uid, data_dic, extinfo)\n return uid",
"def add_uuid(dom, uuid):\n \"\"\"\n Add ``<mods:identifier>`` with `uuid`.\n \"\"\"\n mods_tag = get_mods_tag(dom)\n\n uuid_tag = dhtmlparser.HTMLElement(\n \"mods:identifier\",\n {\"type\": \"uuid\"},\n [dhtmlparser.HTMLElement(uuid)]\n )\n\n insert_tag(uuid_tag, dom.find(\"mods:identifier\"), mods_tag)",
"def modify_init(uid, data_dic):\n '''\n update when init.\n '''\n postinfo = MPost.get_by_uid(uid)\n entry = TabPost.update(\n time_update=tools.timestamp(),\n date=datetime.now(),\n kind=data_dic['kind'] if 'kind' in data_dic else postinfo.kind,\n keywords=data_dic['keywords'] if 'keywords' in data_dic else postinfo.keywords,\n ).where(TabPost.uid == uid)\n entry.execute()\n return uid",
"def update(self, uid: str, data={}) -> str:\n \"\"\"\n Specifies new values for the customizable messages in a form (specified by form_id).\n You can format messages with bold (*bold*) and italic (_italic_) text. HTML tags are forbidden.\n Return a `str` based on success of change, `OK` on success, otherwise an error message.\n \"\"\"\n return self.__client.request('put', '/forms/%s/messages' % uid, data=data)"
] |
[
0.6978135704994202,
0.6813786625862122,
0.681290328502655,
0.6805565357208252,
0.6763060688972473,
0.6754458546638489,
0.6754096746444702,
0.6735838055610657,
0.6723365783691406,
0.6688442826271057,
0.6684103012084961,
0.6675044298171997
] |
Replaces instances of
switch expression:
by
for __case in _Switch(expression):
and replaces
case expression:
by
if __case(expression):
and
default:
by
if __case():
|
def transform_source(text):
'''Replaces instances of
switch expression:
by
    for __case in _Switch(expression):
and replaces
case expression:
by
if __case(expression):
and
default:
by
if __case():
'''
toks = tokenize.generate_tokens(StringIO(text).readline)
result = []
replacing_keyword = False
for toktype, tokvalue, _, _, _ in toks:
if toktype == tokenize.NAME and tokvalue == 'switch':
result.extend([
(tokenize.NAME, 'for'),
(tokenize.NAME, '__case'),
(tokenize.NAME, 'in'),
(tokenize.NAME, '_Switch'),
(tokenize.OP, '(')
])
replacing_keyword = True
elif toktype == tokenize.NAME and (tokvalue == 'case' or tokvalue == 'default'):
result.extend([
(tokenize.NAME, 'if'),
(tokenize.NAME, '__case'),
(tokenize.OP, '(')
])
replacing_keyword = True
elif replacing_keyword and tokvalue == ':':
result.extend([
(tokenize.OP, ')'),
(tokenize.OP, ':')
])
replacing_keyword = False
else:
result.append((toktype, tokvalue))
return tokenize.untokenize(result)
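
# Runnable sketch of the transform above. The imports and the minimal _Switch
# helper are assumptions needed to execute the rewritten source; the real
# module may define _Switch differently.
import tokenize
from io import StringIO

class _Switch:
    def __init__(self, value):
        self.value = value
        self.matched = False

    def __iter__(self):
        # Yield a single matcher: __case(x) is true when x equals the switch
        # value; __case() with no argument acts as the "default" branch.
        yield self.match

    def match(self, *args):
        if self.matched:
            return False
        if not args or self.value in args:
            self.matched = True
            return True
        return False

source = (
    "switch n:\n"
    "    case 1:\n"
    "        result = 'one'\n"
    "    default:\n"
    "        result = 'other'\n"
)
namespace = {'_Switch': _Switch, 'n': 2}
exec(transform_source(source), namespace)
print(namespace['result'])  # -> 'other'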
|
[
"def p_switch_statement(self, p):\n \"\"\"switch_statement : SWITCH LPAREN expr RPAREN case_block\"\"\"\n cases = []\n default = None\n # iterate over return values from case_block\n for item in p[5]:\n if isinstance(item, ast.Default):\n default = item\n elif isinstance(item, list):\n cases.extend(item)\n\n p[0] = ast.Switch(expr=p[3], cases=cases, default=default)",
"def p_switch_statement(self, p):\n \"\"\"switch_statement : SWITCH LPAREN expr RPAREN case_block\"\"\"\n # this uses a completely different type that corrects a\n # subtly wrong interpretation of this construct.\n # see: https://github.com/rspivak/slimit/issues/94\n p[0] = self.asttypes.Switch(expr=p[3], case_block=p[5])\n p[0].setpos(p)\n return",
"def _handle_switch(self, node, scope, ctxt, stream):\n \"\"\"Handle break node\n\n :node: TODO\n :scope: TODO\n :ctxt: TODO\n :stream: TODO\n :returns: TODO\n\n \"\"\"\n def exec_case(idx, cases):\n # keep executing cases until a break is found,\n # or they've all been executed\n for case in cases[idx:]:\n stmts = case.stmts\n try:\n for stmt in stmts:\n self._handle_node(stmt, scope, ctxt, stream)\n except errors.InterpBreak as e:\n break\n\n def get_stmts(stmts, res=None):\n if res is None:\n res = []\n\n stmts = self._flatten_list(stmts)\n for stmt in stmts:\n if isinstance(stmt, tuple):\n stmt = stmt[1]\n\n res.append(stmt)\n\n if stmt.__class__ in [AST.Case, AST.Default]:\n get_stmts(stmt.stmts, res)\n\n return res\n\n def get_cases(nodes, acc=None):\n cases = []\n\n stmts = get_stmts(nodes)\n for stmt in stmts:\n if stmt.__class__ in [AST.Case, AST.Default]:\n cases.append(stmt)\n stmt.stmts = []\n else:\n cases[-1].stmts.append(stmt)\n\n return cases\n\n cond = self._handle_node(node.cond, scope, ctxt, stream)\n \n default_idx = None\n found_match = False\n\n cases = getattr(node, \"pfp_cases\", None)\n if cases is None:\n cases = get_cases(node.stmt.children())\n node.pfp_cases = cases\n\n for idx,child in enumerate(cases):\n if child.__class__ == AST.Default:\n default_idx = idx\n continue\n elif child.__class__ == AST.Case:\n expr = self._handle_node(child.expr, scope, ctxt, stream)\n if expr == cond:\n found_match = True\n exec_case(idx, cases)\n break\n\n if default_idx is not None and not found_match:\n exec_case(default_idx, cases)",
"def p_statement_switch(p):\n 'statement : SWITCH LPAREN expr RPAREN switch_case_list'\n p[0] = ast.Switch(p[3], p[5], lineno=p.lineno(1))",
"def p_switch_stmt(p):\n \"\"\"\n switch_stmt : SWITCH expr semi_opt case_list END_STMT\n \"\"\"\n\n def backpatch(expr, stmt):\n if isinstance(stmt, node.if_stmt):\n stmt.cond_expr.args[1] = expr\n backpatch(expr, stmt.else_stmt)\n\n backpatch(p[2], p[4])\n p[0] = p[4]",
"def _switch(expr, *args, **kw):\n \"\"\"\n Similar to the case-when in SQL. Refer to the example below\n\n :param expr:\n :param args:\n :param kw:\n :return: sequence or scalar\n\n :Example:\n\n >>> # if df.id == 3 then df.name\n >>> # elif df.id == df.fid.abs() then df.name + 'test'\n >>> # default: 'test'\n >>> df.id.switch(3, df.name, df.fid.abs(), df.name + 'test', default='test')\n \"\"\"\n default = _scalar(kw.get('default'))\n\n if len(args) <= 0:\n raise errors.ExpressionError('Switch must accept more than one condition')\n\n if all(isinstance(arg, tuple) and len(arg) == 2 for arg in args):\n conditions, thens = zip(*args)\n else:\n conditions = [arg for i, arg in enumerate(args) if i % 2 == 0]\n thens = [arg for i, arg in enumerate(args) if i % 2 == 1]\n\n if len(conditions) == len(thens):\n conditions, thens = _scalar(conditions), _scalar(thens)\n else:\n raise errors.ExpressionError('Switch should be called by case and then pairs')\n\n if isinstance(expr, (Scalar, SequenceExpr)):\n case = expr\n else:\n case = None\n if not all(hasattr(it, 'dtype') and it.dtype == types.boolean for it in conditions):\n raise errors.ExpressionError('Switch must be called by all boolean conditions')\n\n res = thens if default is None else thens + [default, ]\n output_type = utils.highest_precedence_data_type(*(it.dtype for it in res))\n\n is_seq = isinstance(expr, SequenceExpr) or \\\n any(isinstance(it, SequenceExpr) for it in conditions) or \\\n any(isinstance(it, SequenceExpr) for it in res)\n if case is not None:\n is_seq = is_seq or isinstance(case, SequenceExpr)\n\n kwargs = dict()\n if is_seq:\n kwargs['_data_type'] = output_type\n else:\n kwargs['_value_type'] = output_type\n return Switch(_input=expr, _case=case, _conditions=conditions,\n _thens=thens, _default=default, **kwargs)",
"private Node tryOptimizeSwitch(Node n) {\n checkState(n.isSwitch(), n);\n\n Node defaultCase = tryOptimizeDefaultCase(n);\n\n // Generally, it is unsafe to remove other cases when the default case is not the last one.\n if (defaultCase == null || n.getLastChild().isDefaultCase()) {\n Node cond = n.getFirstChild();\n Node prev = null;\n Node next = null;\n Node cur;\n\n for (cur = cond.getNext(); cur != null; cur = next) {\n next = cur.getNext();\n if (!mayHaveSideEffects(cur.getFirstChild()) && isUselessCase(cur, prev, defaultCase)) {\n removeCase(n, cur);\n } else {\n prev = cur;\n }\n }\n\n // Optimize switches with constant condition\n if (NodeUtil.isLiteralValue(cond, false)) {\n Node caseLabel;\n TernaryValue caseMatches = TernaryValue.TRUE;\n // Remove cases until you find one that may match\n for (cur = cond.getNext(); cur != null; cur = next) {\n next = cur.getNext();\n caseLabel = cur.getFirstChild();\n caseMatches = PeepholeFoldConstants.evaluateComparison(this, Token.SHEQ, cond, caseLabel);\n if (caseMatches == TernaryValue.TRUE) {\n break;\n } else if (caseMatches == TernaryValue.UNKNOWN) {\n break;\n } else {\n removeCase(n, cur);\n }\n }\n if (cur != null && caseMatches == TernaryValue.TRUE) {\n // Skip cases until you find one whose last stm is a removable break\n Node matchingCase = cur;\n Node matchingCaseBlock = matchingCase.getLastChild();\n while (cur != null) {\n Node block = cur.getLastChild();\n Node lastStm = block.getLastChild();\n boolean isLastStmRemovableBreak = false;\n if (lastStm != null && isExit(lastStm)) {\n removeIfUnnamedBreak(lastStm);\n isLastStmRemovableBreak = true;\n }\n next = cur.getNext();\n // Remove the fallthrough case labels\n if (cur != matchingCase) {\n while (block.hasChildren()) {\n matchingCaseBlock.addChildToBack(block.getFirstChild().detach());\n }\n reportChangeToEnclosingScope(cur);\n cur.detach();\n }\n cur = next;\n if (isLastStmRemovableBreak) {\n break;\n }\n }\n\n // Remove any remaining cases\n for (; cur != null; cur = next) {\n next = cur.getNext();\n removeCase(n, cur);\n }\n // If there is one case left, we may be able to fold it\n cur = cond.getNext();\n if (cur != null && cur.getNext() == null) {\n return tryRemoveSwitchWithSingleCase(n, false);\n }\n }\n }\n }\n\n return tryRemoveSwitch(n);\n }",
"function parseSwitchCase() {\n var test,\n consequent = [],\n statement;\n\n if (matchKeyword('default')) {\n lex();\n test = null;\n } else {\n expectKeyword('case');\n test = parseExpression();\n }\n expect(':');\n\n while (index < length) {\n if (match('}') || matchKeyword('default') || matchKeyword('case')) {\n break;\n }\n statement = parseStatement();\n if (typeof statement === 'undefined') {\n break;\n }\n consequent.push(statement);\n }\n\n return {\n type: Syntax.SwitchCase,\n test: test,\n consequent: consequent\n };\n }",
"function parseSwitchCase() {\n var test, consequent = [], statement, node = new Node();\n\n if (matchKeyword('default')) {\n lex();\n test = null;\n } else {\n expectKeyword('case');\n test = parseExpression();\n }\n expect(':');\n\n while (startIndex < length) {\n if (match('}') || matchKeyword('default') || matchKeyword('case')) {\n break;\n }\n statement = parseStatementListItem();\n consequent.push(statement);\n }\n\n return node.finishSwitchCase(test, consequent);\n }",
"function parseSwitchCase() {\n var test, consequent = [], statement, startToken;\n\n startToken = lookahead;\n if (matchKeyword('default')) {\n lex();\n test = null;\n } else {\n expectKeyword('case');\n test = parseExpression();\n }\n expect(':');\n\n while (index < length) {\n if (match('}') || matchKeyword('default') || matchKeyword('case')) {\n break;\n }\n statement = parseStatement();\n consequent.push(statement);\n }\n\n return delegate.markEnd(delegate.createSwitchCase(test, consequent), startToken);\n }",
"void expectSwitchMatchesCase(Node n, JSType switchType, JSType caseType) {\n // ECMA-262, page 68, step 3 of evaluation of CaseBlock,\n // but allowing extra autoboxing.\n // TODO(user): remove extra conditions when type annotations\n // in the code base have adapted to the change in the compiler.\n if (!switchType.canTestForShallowEqualityWith(caseType)\n && (caseType.autoboxesTo() == null || !caseType.autoboxesTo().isSubtypeOf(switchType))) {\n mismatch(n.getFirstChild(), \"case expression doesn't match switch\", caseType, switchType);\n } else if (!switchType.canTestForShallowEqualityWith(caseType)\n && (caseType.autoboxesTo() == null\n || !caseType.autoboxesTo().isSubtypeWithoutStructuralTyping(switchType))) {\n TypeMismatch.recordImplicitInterfaceUses(this.implicitInterfaceUses, n, caseType, switchType);\n TypeMismatch.recordImplicitUseOfNativeObject(this.mismatches, n, caseType, switchType);\n }\n }",
"def swap_default(mode, equation, symbol_names, default, **kwargs):\n '''\n Given a `sympy` equation or equality, along with a list of symbol names,\n substitute the specified default value for each symbol for which a value is\n not provided through a keyword argument.\n\n For example, consider the following equality:\n\n >>> sp.pprint(H)\n V₂ Z₂\n ── = ──\n V₁ Z₁\n\n Let us substitute a default value of 1 for terms Z1 and Z2:\n\n >>> sp.pprint(subs_default(H, ['Z1', 'Z2'], 1))\n V₂\n ── = 1\n V₁\n\n Now, let us specify a default value of 1 for terms Z1 and Z2, but provide\n an overriding value for Z1:\n\n >>> sp.pprint(subs_default(H, ['Z1', 'Z2'], 1, Z1=4))\n V₂\n ── = 1/4\n V₁\n\n Note that keyword arguments for terms not specified in the list of symbol\n names are ignored:\n\n >>> sp.pprint(subs_default(H, ['Z1', 'Z2'], 1, Z1=4, Q=7))\n V₂\n ── = 1/4\n V₁\n '''\n if mode == 'subs':\n swap_f = _subs\n default_swap_f = _subs\n elif mode == 'limit':\n swap_f = _limit\n default_swap_f = _subs\n elif mode == 'limit_default':\n swap_f = _subs\n default_swap_f = _limit\n else:\n raise ValueError('''Unsupported mode. `mode` must be one of: '''\n '''('subs', 'limit').''')\n\n result = equation\n for s in symbol_names:\n if s in kwargs:\n if isinstance(kwargs[s], Iterable):\n continue\n else:\n result = swap_f(result, s, kwargs[s])\n else:\n result = default_swap_f(result, s, default)\n return result"
] |
[
0.7839750051498413,
0.7434612512588501,
0.7269330024719238,
0.7261450290679932,
0.7179446220397949,
0.710665225982666,
0.7068049311637878,
0.6975150108337402,
0.6962053775787354,
0.6949174404144287,
0.6891921162605286,
0.6840291619300842
] |
Search for entries in the LDAP database.
|
def search(self, base, scope, filterstr='(objectClass=*)',
attrlist=None, limit=None) -> Generator[Tuple[str, dict], None, None]:
"""
        Search for entries in the LDAP database.
"""
_debug("search", base, scope, filterstr, attrlist, limit)
# first results
if attrlist is None:
attrlist = ldap3.ALL_ATTRIBUTES
elif isinstance(attrlist, set):
attrlist = list(attrlist)
def first_results(obj):
_debug("---> searching ldap", limit)
obj.search(
base, filterstr, scope, attributes=attrlist, paged_size=limit)
return obj.response
# get the 1st result
result_list = self._do_with_retry(first_results)
# Loop over list of search results
for result_item in result_list:
# skip searchResRef for now
if result_item['type'] != "searchResEntry":
continue
dn = result_item['dn']
attributes = result_item['raw_attributes']
# did we already retrieve this from cache?
_debug("---> got ldap result", dn)
_debug("---> yielding", result_item)
yield (dn, attributes)
# we are finished - return results, eat cake
_debug("---> done")
return
|
[
"def _search(self, base, fltr, attrs=None, scope=ldap.SCOPE_SUBTREE):\n \"\"\"Perform LDAP search\"\"\"\n try:\n results = self._conn.search_s(base, scope, fltr, attrs)\n except Exception as e:\n log.exception(self._get_ldap_msg(e))\n results = False\n return results",
"def search( self, base=False, trim=False, objects=False, **kwargs ):\n \"\"\" Returns matching entries for search in ldap\n structured as [(dn, {attributes})]\n UNLESS searching by dn, in which case the first match\n is returned\n \"\"\"\n scope = pyldap.SCOPE_SUBTREE\n if not base:\n base = self.users\n\n filterstr =''\n for key, value in kwargs.iteritems():\n filterstr += '({0}={1})'.format(key,value)\n if key == 'dn':\n filterstr = '(objectClass=*)'\n base = value\n scope = pyldap.SCOPE_BASE\n break\n\n if len(kwargs) > 1:\n filterstr = '(&'+filterstr+')'\n\n result = self.ldap.search_s(base, pyldap.SCOPE_SUBTREE, filterstr, ['*','+'])\n if base == self.users:\n for member in result:\n groups = self.getGroups(member[0])\n member[1]['groups'] = groups\n if 'eboard' in member[1]['groups']:\n member[1]['committee'] = self.search(base=self.committees, \\\n head=member[0])[0][1]['cn'][0]\n if objects:\n return self.memberObjects(result)\n finalResult = self.trimResult(result) if trim else result\n return finalResult",
"def search(self, filterstr, attrlist):\n \"\"\"Query the configured LDAP server.\"\"\"\n return self._paged_search_ext_s(self.settings.BASE, ldap.SCOPE_SUBTREE, filterstr=filterstr,\n attrlist=attrlist, page_size=self.settings.PAGE_SIZE)",
"def search(self, filter, base_dn=None, attrs=None, scope=None,\n timeout=None, limit=None):\n \"\"\"\n Search the directory.\n \"\"\"\n if base_dn is None:\n base_dn = self._search_defaults.get('base_dn', '')\n if attrs is None:\n attrs = self._search_defaults.get('attrs', None)\n if scope is None:\n scope = self._search_defaults.get('scope', ldap.SCOPE_SUBTREE)\n if timeout is None:\n timeout = self._search_defaults.get('timeout', -1)\n if limit is None:\n limit = self._search_defaults.get('limit', 0)\n\n results = self.connection.search_ext_s(\n base_dn, scope, filter, attrs, timeout=timeout, sizelimit=limit)\n return self.to_items(results)",
"def search(self, filter, attributes=None):\n \"\"\"Search LDAP for records.\"\"\"\n if attributes is None:\n attributes = ['*']\n\n if filter is None:\n filter = [\"(objectclass=*)\"]\n\n # Convert filter list into an LDAP-consumable format\n filterstr = \"(&{})\".format(''.join(filter))\n self.conn.search(\n search_base=self.basedn,\n search_filter=filterstr,\n search_scope=ldap3.SUBTREE,\n attributes=attributes)\n return self.conn.entries",
"def _search(self, searchfilter, attrs, basedn):\n \"\"\"Generic search\"\"\"\n if attrs == NO_ATTR:\n attrlist = []\n elif attrs == DISPLAYED_ATTRS:\n # fix me later (to much attributes)\n attrlist = self.attrlist\n elif attrs == LISTED_ATTRS:\n attrlist = self.attrlist\n elif attrs == ALL_ATTRS:\n attrlist = None\n else:\n attrlist = None\n\n self._logger(\n severity=logging.DEBUG,\n msg=\"%(backend)s: executing search \"\n \"with filter '%(filter)s' in DN '%(dn)s'\" % {\n 'backend': self.backend_name,\n 'dn': basedn,\n 'filter': self._uni(searchfilter)\n }\n )\n\n # bind and search the ldap\n ldap_client = self._bind()\n try:\n r = ldap_client.search_s(\n basedn,\n ldap.SCOPE_SUBTREE,\n searchfilter,\n attrlist=attrlist\n )\n except Exception as e:\n ldap_client.unbind_s()\n self._exception_handler(e)\n\n ldap_client.unbind_s()\n\n # python-ldap doesn't know utf-8,\n # it treates everything as bytes.\n # So it's necessary to reencode\n # it's output in utf-8.\n ret = []\n for entry in r:\n uni_dn = self._uni(entry[0])\n uni_attrs = {}\n for attr in entry[1]:\n if type(entry[1][attr]) is list:\n tmp = []\n for value in entry[1][attr]:\n tmp.append(self._uni(value))\n else:\n tmp = self._uni(entry[1][attr])\n uni_attrs[self._uni(attr)] = tmp\n ret.append((uni_dn, uni_attrs))\n return ret",
"def search(connect_spec, base, scope='subtree', filterstr='(objectClass=*)',\n attrlist=None, attrsonly=0):\n '''Search an LDAP database.\n\n :param connect_spec:\n See the documentation for the ``connect_spec`` parameter for\n :py:func:`connect`.\n\n :param base:\n Distinguished name of the entry at which to start the search.\n\n :param scope:\n One of the following:\n\n * ``'subtree'``\n Search the base and all of its descendants.\n\n * ``'base'``\n Search only the base itself.\n\n * ``'onelevel'``\n Search only the base's immediate children.\n\n :param filterstr:\n String representation of the filter to apply in the search.\n\n :param attrlist:\n Limit the returned attributes to those in the specified list.\n If ``None``, all attributes of each entry are returned.\n\n :param attrsonly:\n If non-zero, don't return any attribute values.\n\n :returns:\n a dict of results. The dict is empty if there are no results.\n The dict maps each returned entry's distinguished name to a\n dict that maps each of the matching attribute names to a list\n of its values.\n\n CLI example:\n\n .. code-block:: bash\n\n salt '*' ldap3.search \"{\n 'url': 'ldaps://ldap.example.com/',\n 'bind': {\n 'method': 'simple',\n 'dn': 'cn=admin,dc=example,dc=com',\n 'password': 'secret',\n },\n }\" \"base='dc=example,dc=com'\"\n '''\n l = connect(connect_spec)\n scope = getattr(ldap, 'SCOPE_' + scope.upper())\n try:\n results = l.c.search_s(base, scope, filterstr, attrlist, attrsonly)\n except ldap.NO_SUCH_OBJECT:\n results = []\n except ldap.LDAPError as e:\n _convert_exception(e)\n return dict(results)",
"def LDAP_search(pattern_search, attribute):\n \"\"\"\n Do a LDAP search\n \"\"\"\n connection, ldap_base = _get_LDAP_connection()\n\n connection.search(\n search_base=ldap_base,\n search_filter=pattern_search,\n attributes=[attribute]\n )\n return connection.response",
"def execute(self, connection, filterargs=(), escape=True):\n \"\"\"\n Executes the search on the given connection (an LDAPObject). filterargs\n is an object that will be used for expansion of the filter string.\n If escape is True, values in filterargs will be escaped.\n\n The python-ldap library returns utf8-encoded strings. For the sake of\n sanity, this method will decode all result strings and return them as\n Unicode.\n \"\"\"\n if escape:\n filterargs = self._escape_filterargs(filterargs)\n\n try:\n filterstr = self.filterstr % filterargs\n results = connection.search_s(force_str(self.base_dn),\n self.scope,\n force_str(filterstr))\n except ldap.LDAPError as e:\n results = []\n logger.error(u\"search_s('%s', %d, '%s') raised %s\" %\n (self.base_dn, self.scope, filterstr, pprint.pformat(e)))\n\n return self._process_results(results)",
"public List<LDAPEntry> search(LDAPConnection ldapConnection,\n String baseDN, String query) throws GuacamoleException {\n\n logger.debug(\"Searching \\\"{}\\\" for objects matching \\\"{}\\\".\", baseDN, query);\n\n try {\n\n // Search within subtree of given base DN\n LDAPSearchResults results = ldapConnection.search(baseDN,\n LDAPConnection.SCOPE_SUB, query, null, false,\n confService.getLDAPSearchConstraints());\n\n // Produce list of all entries in the search result, automatically\n // following referrals if configured to do so\n List<LDAPEntry> entries = new ArrayList<>(results.getCount());\n while (results.hasMore()) {\n\n try {\n entries.add(results.next());\n }\n\n // Warn if referrals cannot be followed\n catch (LDAPReferralException e) {\n if (confService.getFollowReferrals()) {\n logger.error(\"Could not follow referral: {}\", e.getFailedReferral());\n logger.debug(\"Error encountered trying to follow referral.\", e);\n throw new GuacamoleServerException(\"Could not follow LDAP referral.\", e);\n }\n else {\n logger.warn(\"Given a referral, but referrals are disabled. Error was: {}\", e.getMessage());\n logger.debug(\"Got a referral, but configured to not follow them.\", e);\n }\n }\n \n catch (LDAPException e) {\n logger.warn(\"Failed to process an LDAP search result. Error was: {}\", e.resultCodeToString());\n logger.debug(\"Error processing LDAPEntry search result.\", e);\n }\n\n }\n\n return entries;\n\n }\n catch (LDAPException | GuacamoleException e) {\n throw new GuacamoleServerException(\"Unable to query list of \"\n + \"objects from LDAP directory.\", e);\n }\n\n }",
"def ldap_search(self, filter, attributes, incremental, incremental_filter):\n \"\"\"\n Query the configured LDAP server with the provided search filter and\n attribute list.\n \"\"\"\n for uri in self.conf_LDAP_SYNC_BIND_URI:\n #Read record of this uri\n if (self.working_uri == uri):\n adldap_sync = self.working_adldap_sync\n created = False\n else:\n adldap_sync, created = ADldap_Sync.objects.get_or_create(ldap_sync_uri=uri)\n\n if ((adldap_sync.syncs_to_full > 0) and incremental):\n filter_to_use = incremental_filter.replace('?', self.whenchanged.strftime(self.conf_LDAP_SYNC_INCREMENTAL_TIMESTAMPFORMAT))\n logger.debug(\"Using an incremental search. Filter is:'%s'\" % filter_to_use)\n else:\n filter_to_use = filter\n\n ldap.set_option(ldap.OPT_REFERRALS, 0)\n #ldap.set_option(ldap.OPT_NETWORK_TIMEOUT, 10)\n l = PagedLDAPObject(uri)\n l.protocol_version = 3\n\n if (uri.startswith('ldaps:')):\n l.set_option(ldap.OPT_X_TLS, ldap.OPT_X_TLS_DEMAND)\n l.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_DEMAND)\n l.set_option(ldap.OPT_X_TLS_DEMAND, True)\n else:\n l.set_option(ldap.OPT_X_TLS, ldap.OPT_X_TLS_NEVER)\n l.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)\n l.set_option(ldap.OPT_X_TLS_DEMAND, False)\n try:\n l.simple_bind_s(self.conf_LDAP_SYNC_BIND_DN, self.conf_LDAP_SYNC_BIND_PASS)\n except ldap.LDAPError as e:\n logger.error(\"Error connecting to LDAP server %s : %s\" % (uri, e))\n continue\n\n results = l.paged_search_ext_s(self.conf_LDAP_SYNC_BIND_SEARCH, ldap.SCOPE_SUBTREE, filter_to_use, attrlist=attributes, serverctrls=None)\n l.unbind_s()\n if (self.working_uri is None):\n self.working_uri = uri\n self.conf_LDAP_SYNC_BIND_URI.insert(0, uri)\n self.working_adldap_sync = adldap_sync\n\n return (uri, results) # Return both the LDAP server URI used and the request. This is for incremental sync purposes\n #if not connected correctly, raise error\n raise",
"def search(table: LdapObjectClass, query: Optional[Q] = None,\n database: Optional[Database] = None, base_dn: Optional[str] = None) -> Iterator[LdapObject]:\n \"\"\" Search for a object of given type in the database. \"\"\"\n fields = table.get_fields()\n db_fields = {\n name: field\n for name, field in fields.items()\n if field.db_field\n }\n\n database = get_database(database)\n connection = database.connection\n\n search_options = table.get_search_options(database)\n\n iterator = tldap.query.search(\n connection=connection,\n query=query,\n fields=db_fields,\n base_dn=base_dn or search_options.base_dn,\n object_classes=search_options.object_class,\n pk=search_options.pk_field,\n )\n\n for dn, data in iterator:\n python_data = _db_to_python(data, table, dn)\n python_data = table.on_load(python_data, database)\n yield python_data"
] |
[
0.8410250544548035,
0.8161965608596802,
0.7980911135673523,
0.7957127690315247,
0.7936100363731384,
0.7926977276802063,
0.7824440002441406,
0.7757538557052612,
0.763999879360199,
0.7509103417396545,
0.7479243874549866,
0.7462848424911499
] |
Rename a dn in the LDAP database; see the ldap module. Doesn't return a
result if transactions are enabled.
|
def rename(self, dn: str, new_rdn: str, new_base_dn: Optional[str] = None) -> None:
"""
        Rename a dn in the LDAP database; see the ldap module. Doesn't return a
        result if transactions are enabled.
"""
raise NotImplementedError()
|
[
"def rename(self, dn: str, new_rdn: str, new_base_dn: Optional[str] = None) -> None:\n \"\"\"\n rename a dn in the ldap database; see ldap module. doesn't return a\n result if transactions enabled.\n \"\"\"\n\n return self._do_with_retry(\n lambda obj: obj.rename_s(dn, new_rdn, new_base_dn))",
"def rename(self, dn: str, new_rdn: str, new_base_dn: Optional[str] = None) -> None:\n \"\"\"\n rename a dn in the ldap database; see ldap module. doesn't return a\n result if transactions enabled.\n \"\"\"\n\n _debug(\"rename\", self, dn, new_rdn, new_base_dn)\n\n # split up the parameters\n split_dn = tldap.dn.str2dn(dn)\n split_newrdn = tldap.dn.str2dn(new_rdn)\n assert(len(split_newrdn) == 1)\n\n # make dn unqualified\n rdn = tldap.dn.dn2str(split_dn[0:1])\n\n # make newrdn fully qualified dn\n tmplist = [split_newrdn[0]]\n if new_base_dn is not None:\n tmplist.extend(tldap.dn.str2dn(new_base_dn))\n old_base_dn = tldap.dn.dn2str(split_dn[1:])\n else:\n tmplist.extend(split_dn[1:])\n old_base_dn = None\n newdn = tldap.dn.dn2str(tmplist)\n\n _debug(\"--> commit \", self, dn, new_rdn, new_base_dn)\n _debug(\"--> rollback\", self, newdn, rdn, old_base_dn)\n\n # on commit carry out action; on rollback reverse rename\n def on_commit(obj):\n obj.modify_dn(dn, new_rdn, new_superior=new_base_dn)\n\n def on_rollback(obj):\n obj.modify_dn(newdn, rdn, new_superior=old_base_dn)\n\n return self._process(on_commit, on_rollback)",
"def rename(python_data: LdapObject, new_base_dn: str = None,\n database: Optional[Database] = None, **kwargs) -> LdapObject:\n \"\"\" Move/rename a LdapObject in the database. \"\"\"\n table = type(python_data)\n dn = python_data.get_as_single('dn')\n assert dn is not None\n\n database = get_database(database)\n connection = database.connection\n\n # extract key and value from kwargs\n if len(kwargs) == 1:\n name, value = list(kwargs.items())[0]\n\n # work out the new rdn of the object\n split_new_rdn = [[(name, value, 1)]]\n\n field = _get_field_by_name(table, name)\n assert field.db_field\n\n python_data = python_data.merge({\n name: value,\n })\n\n elif len(kwargs) == 0:\n split_new_rdn = [str2dn(dn)[0]]\n else:\n assert False\n\n new_rdn = dn2str(split_new_rdn)\n\n connection.rename(\n dn,\n new_rdn,\n new_base_dn,\n )\n\n if new_base_dn is not None:\n split_base_dn = str2dn(new_base_dn)\n else:\n split_base_dn = str2dn(dn)[1:]\n\n tmp_list = [split_new_rdn[0]]\n tmp_list.extend(split_base_dn)\n\n new_dn = dn2str(tmp_list)\n\n python_data = python_data.merge({\n 'dn': new_dn,\n })\n return python_data",
"def rename(dn, new_rdn, delete_old, *args)\n log_dispatch(:rename, dn, new_rdn, delete_old, *args)\n adapter.rename(dn, new_rdn.to_str, delete_old, *args)\n end",
"function rename_group($group_id, $new_name, &$new_gid)\n {\n $group_cache = $this->_fetch_groups();\n $old_dn = $group_cache[$group_id]['dn'];\n $new_rdn = \"cn=\" . rcube_ldap_generic::quote_string($new_name, true);\n $new_gid = self::dn_encode($new_rdn . ',' . $this->groups_base_dn);\n\n if (!$this->ldap->rename($old_dn, $new_rdn, null, true)) {\n $this->set_error(self::ERROR_SAVING, 'errorsaving');\n return false;\n }\n\n if ($this->cache) {\n $this->cache->remove('groups');\n }\n\n return $new_name;\n }",
"public function rename($dn, $newdn, $newparent, $deleteolddn)\n\t{\n\t\tif (!$this->isBound || !$this->isConnected())\n\t\t{\n\t\t\treturn false;\n\t\t}\n\n\t\treturn ldap_rename($this->resource, $dn, $newdn, $newparent, $deleteolddn);\n\t}",
"public function rename($dn, $newRdn, $newParent, $deleteOldRdn = false)\n {\n return ldap_rename($this->getConnection(), $dn, $newRdn, $newParent, $deleteOldRdn);\n }",
"def database_rename(object_id, input_params={}, always_retry=True, **kwargs):\n \"\"\"\n Invokes the /database-xxxx/rename API method.\n\n For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Name#API-method%3A-%2Fclass-xxxx%2Frename\n \"\"\"\n return DXHTTPRequest('/%s/rename' % object_id, input_params, always_retry=always_retry, **kwargs)",
"public function rename($dn, $newrdn, $newparent = null, $deleteoldrdn = true)\n {\n $this->_debug(\"C: Rename $dn to $newrdn\");\n\n if (!ldap_rename($this->conn, $dn, $newrdn, $newparent, $deleteoldrdn)) {\n $this->_error(\"ldap_rename() failed with \" . ldap_error($this->conn));\n return false;\n }\n\n $this->_debug(\"S: OK\");\n return true;\n }",
"@Override\n\tpublic void rename(final Name oldDn, final Name newDn) {\n\t\texecuteReadWrite(new ContextExecutor() {\n\t\t\tpublic Object executeWithContext(DirContext ctx) throws javax.naming.NamingException {\n\t\t\t\tctx.rename(oldDn, newDn);\n\t\t\t\treturn null;\n\t\t\t}\n\t\t});\n\t}",
"public void rename(String dn, String newDn) throws WIMException {\n TimedDirContext ctx = iContextManager.getDirContext();\n iContextManager.checkWritePermission(ctx);\n try {\n try {\n ctx.rename(dn, newDn);\n } catch (NamingException e) {\n if (!ContextManager.isConnectionException(e)) {\n throw e;\n }\n ctx = iContextManager.reCreateDirContext(ctx, e.toString());\n ctx.rename(dn, newDn);\n }\n } catch (NamingException e) {\n String msg = Tr.formatMessage(tc, WIMMessageKey.NAMING_EXCEPTION, WIMMessageHelper.generateMsgParms(e.toString(true)));\n throw new WIMSystemException(WIMMessageKey.NAMING_EXCEPTION, msg, e);\n } finally {\n iContextManager.releaseDirContext(ctx);\n }\n }",
"public void rollback() {\n log.debug(\"Rolling back rename operation\");\n try {\n ldapOperations.rename(newDn, originalDn);\n } catch (Exception e) {\n log.warn(\"Unable to rollback rename operation. \" + \"originalDn: \"\n + newDn + \"; newDn: \" + originalDn);\n }\n }"
] |
[
0.8929044604301453,
0.8716602325439453,
0.8117704391479492,
0.790888786315918,
0.7474083304405212,
0.7388917207717896,
0.7381210923194885,
0.7348619699478149,
0.7344306111335754,
0.7342362999916077,
0.7287878394126892,
0.7264547348022461
] |
Example shows how to configure an environment from scratch
|
def prepare_env(org):
""" Example shows how to configure environment from scratch """
# Add services
key_service = org.service(type='builtin:cobalt_secure_store', name='Keystore')
wf_service = org.service(type='builtin:workflow_service', name='Workflow', parameters='{}')
# Add services to environment
env = org.environment(name='default')
env.clean()
env.add_service(key_service)
env.add_service(wf_service)
env.add_policy(
{"action": "provisionVms",
"parameter": "publicKeyId",
"value": key_service.regenerate()['id']})
# Add cloud provider account
access = {
"provider": "aws-ec2",
"usedEnvironments": [],
"ec2SecurityGroup": "default",
"providerCopy": "aws-ec2",
"name": "test-provider",
"jcloudsIdentity": KEY,
"jcloudsCredential": SECRET_KEY,
"jcloudsRegions": "us-east-1"
}
prov = org.provider(access)
env.add_provider(prov)
return org.organizationId
|
[
"def configure(self, remotes=None,\n client_id=None,\n start_timeout=None, docker_image=None,\n ignore_clock_skew=False, disable_action_probes=False,\n vnc_driver=None, vnc_kwargs=None,\n rewarder_driver=None,\n replace_on_crash=False, allocate_sync=True,\n observer=False, api_key=None,\n record=False,\n sample_env_ids=None,\n ):\n \"\"\"Universe method to configure the environment.\n\n Args:\n\n ignore_clock_skew (bool): Assume remotes are on the same machine as us,\n for the purposes of diagnostics measurement.\n\n If true, we skip measuring the clock skew over the network,\n and skip generating diagnostics which rely on it.\n\n True when used by the rewarder to measure latency between\n the VNC frame and its calculation of reward for that\n frame. In this case we share a common clock with the env\n generating the VNC frame, so we don't need to send/receive\n probes. Clock skew is zero in this case.\n\n False when remotes are potentially different machines\n (such as an agent, or a demonstrator), and we will be\n sending probe keys and measuring network ping rountrip\n times to calculate clock skew.\n \"\"\"\n if self._started:\n raise error.Error('{} has already been started; cannot change configuration now.'.format(self))\n\n universe.configure_logging()\n\n twisty.start_once()\n\n if self.spec is not None:\n runtime = registration.runtime_spec(self.spec.tags['runtime'])\n # Let the user manually set the docker_image version\n if docker_image:\n # TODO: don't support this option?\n runtime.image = docker_image\n else:\n runtime = None\n\n if remotes is None:\n remotes = os.environ.get('GYM_VNC_REMOTES', '1')\n\n if client_id is None:\n client_id = default_client_id()\n\n if vnc_kwargs is None:\n vnc_kwargs = {}\n\n self.remote_manager, self.n = remotes_module.build(\n client_id=client_id,\n remotes=remotes, runtime=runtime, start_timeout=start_timeout,\n api_key=api_key,\n use_recorder_ports=record,\n )\n self.connection_names = [None] * self.n\n self.connection_labels = [None] * self.n\n self.crashed = {}\n\n self.allow_reconnect = replace_on_crash and self.remote_manager.supports_reconnect\n if self.remote_manager.connect_vnc:\n cls = vnc_session(vnc_driver)\n vnc_kwargs.setdefault('start_timeout', self.remote_manager.start_timeout)\n if runtime == 'gym-core':\n vnc_kwargs.setdefault('encoding', 'zrle')\n else:\n vnc_kwargs.setdefault('encoding', 'tight')\n vnc_kwargs.setdefault('fine_quality_level', 50)\n vnc_kwargs.setdefault('subsample_level', 2)\n # Filter out None values, since some drivers may not handle them correctly\n vnc_kwargs = {k: v for k, v in vnc_kwargs.items() if v is not None}\n logger.info('Using VNCSession arguments: %s. (Customize by running \"env.configure(vnc_kwargs={...})\"', vnc_kwargs)\n self.vnc_kwargs = vnc_kwargs\n self.vnc_session = cls()\n else:\n self.vnc_session = None\n\n self._observer = observer\n if self.remote_manager.connect_rewarder:\n cls = rewarder_session(rewarder_driver)\n self.rewarder_session = cls()\n else:\n self.rewarder_session = None\n\n if ignore_clock_skew:\n logger.info('Printed stats will ignore clock skew. 
(This usually makes sense only when the environment and agent are on the same machine.)')\n\n if self.rewarder_session or ignore_clock_skew:\n # Don't need rewarder session if we're ignoring clock skew\n if self.spec is not None:\n metadata_encoding = self.spec.tags.get('metadata_encoding')\n else:\n metadata_encoding = None\n self.diagnostics = diagnostics.Diagnostics(self.n, self._probe_key, ignore_clock_skew, metadata_encoding=metadata_encoding, disable_action_probes=disable_action_probes)\n else:\n self.diagnostics = None\n\n self._sample_env_ids = sample_env_ids\n\n self._reset_mask()\n self._started = True\n\n self.remote_manager.allocate([str(i) for i in range(self.n)], initial=True)\n if allocate_sync:\n # Block until we've fulfilled n environments\n self._handle_connect(n=self.n)\n else:\n # Handle any backends which synchronously fufill their\n # allocation.\n self._handle_connect()",
"def do_env(self, line):\n \"\"\"\n env {environment-name}\n \"\"\"\n if not line:\n print \"use: env {environment-name}\"\n else:\n if not set_environment(line):\n print \"no configuration for environment %s\" % line\n else:\n self.do_login('')",
"def _init_env(self):\n '''\n Initialize some Salt environment.\n '''\n from salt.config import minion_config\n from salt.grains import core as g_core\n g_core.__opts__ = minion_config(self.DEFAULT_MINION_CONFIG_PATH)\n self.grains_core = g_core",
"protected function configure()\n {\n $this\n ->setName('environment:info')\n ->addArgument('property', InputArgument::OPTIONAL, 'The name of the property')\n ->addArgument('value', InputArgument::OPTIONAL, 'Set a new value for the property')\n ->addOption('refresh', null, InputOption::VALUE_NONE, 'Whether to refresh the cache')\n ->setDescription('Read or set properties for an environment');\n PropertyFormatter::configureInput($this->getDefinition());\n Table::configureInput($this->getDefinition());\n $this->addProjectOption()\n ->addEnvironmentOption()\n ->addWaitOptions();\n $this->addExample('Read all environment properties')\n ->addExample(\"Show the environment's status\", 'status')\n ->addExample('Show the date the environment was created', 'created_at')\n ->addExample('Enable email sending', 'enable_smtp true')\n ->addExample('Change the environment title', 'title \"New feature\"')\n ->addExample(\"Change the environment's parent branch\", 'parent sprint-2');\n $this->setHiddenAliases(['environment:metadata']);\n }",
"def setup_env(hparams,\n batch_size,\n max_num_noops,\n rl_env_max_episode_steps=-1,\n env_name=None):\n \"\"\"Setup.\"\"\"\n if not env_name:\n env_name = full_game_name(hparams.game)\n\n maxskip_envs = should_apply_max_and_skip_env(hparams)\n\n env = T2TGymEnv(\n base_env_name=env_name,\n batch_size=batch_size,\n grayscale=hparams.grayscale,\n should_derive_observation_space=hparams\n .rl_should_derive_observation_space,\n resize_width_factor=hparams.resize_width_factor,\n resize_height_factor=hparams.resize_height_factor,\n rl_env_max_episode_steps=rl_env_max_episode_steps,\n max_num_noops=max_num_noops,\n maxskip_envs=maxskip_envs,\n sticky_actions=hparams.sticky_actions\n )\n return env",
"def init(fileformat, path, env, _vars, _secrets, wg, y, django):\n \"\"\"Inits a dynaconf project\n By default it creates a settings.toml and a .secrets.toml\n for [default|development|staging|testing|production|global] envs.\n\n The format of the files can be changed passing\n --format=yaml|json|ini|py.\n\n This command must run on the project's root folder or you must pass\n --path=/myproject/root/folder.\n\n If you want to have a .env created with the ENV defined there e.g:\n `ENV_FOR_DYNACONF=production` just pass --env=production and then .env\n will also be created and the env defined to production.\n \"\"\"\n click.echo(\"Cofiguring your Dynaconf environment\")\n\n env = env or settings.current_env.lower()\n\n loader = importlib.import_module(\n \"dynaconf.loaders.{}_loader\".format(fileformat)\n )\n # Turn foo=bar=zaz in {'foo': 'bar=zaz'}\n env_data = split_vars(_vars)\n _secrets = split_vars(_secrets)\n\n # create placeholder data for every env\n settings_data = {k: {\"value\": \"value for {}\".format(k)} for k in ENVS}\n secrets_data = {k: {\"secret\": \"secret for {}\".format(k)} for k in ENVS}\n if env_data:\n settings_data[env] = env_data\n settings_data[\"default\"] = {k: \"default\" for k in env_data}\n if _secrets:\n secrets_data[env] = _secrets\n secrets_data[\"default\"] = {k: \"default\" for k in _secrets}\n\n path = Path(path)\n\n if str(path).endswith(\n constants.ALL_EXTENSIONS + (\"py\",)\n ): # pragma: no cover # noqa\n settings_path = path\n secrets_path = path.parent / \".secrets.{}\".format(fileformat)\n dotenv_path = path.parent / \".env\"\n gitignore_path = path.parent / \".gitignore\"\n else:\n if fileformat == \"env\":\n if str(path) in (\".env\", \"./.env\"): # pragma: no cover\n settings_path = path\n elif str(path).endswith(\"/.env\"): # pragma: no cover\n settings_path = path\n elif str(path).endswith(\".env\"): # pragma: no cover\n settings_path = path.parent / \".env\"\n else:\n settings_path = path / \".env\"\n Path.touch(settings_path)\n secrets_path = None\n else:\n settings_path = path / \"settings.{}\".format(fileformat)\n secrets_path = path / \".secrets.{}\".format(fileformat)\n dotenv_path = path / \".env\"\n gitignore_path = path / \".gitignore\"\n\n if fileformat in [\"py\", \"env\"]:\n # for Python and .env files writes a single env\n settings_data = settings_data[env]\n secrets_data = secrets_data[env]\n\n if not y and settings_path and settings_path.exists(): # pragma: no cover\n click.confirm(\n \"{} exists do you want to overwrite it?\".format(settings_path),\n abort=True,\n )\n\n if not y and secrets_path and secrets_path.exists(): # pragma: no cover\n click.confirm(\n \"{} exists do you want to overwrite it?\".format(secrets_path),\n abort=True,\n )\n\n if settings_path and settings_data:\n loader.write(settings_path, settings_data, merge=True)\n if secrets_path and secrets_data:\n loader.write(secrets_path, secrets_data, merge=True)\n\n # write .env file\n # if env not in ['default', 'development']: # pragma: no cover\n if not dotenv_path.exists(): # pragma: no cover\n Path.touch(dotenv_path)\n dotenv_cli.set_key(str(dotenv_path), \"ENV_FOR_DYNACONF\", env.upper())\n else: # pragma: no cover\n click.echo(\n \".env already exists please set ENV_FOR_DYNACONF={}\".format(\n env.upper()\n )\n )\n\n if wg:\n # write .gitignore\n ignore_line = \".secrets.*\"\n comment = \"\\n# Ignore dynaconf secret files\\n\"\n if not gitignore_path.exists():\n with io.open(str(gitignore_path), \"w\", encoding=ENC) as f:\n f.writelines([comment, 
ignore_line, \"\\n\"])\n else:\n existing = (\n ignore_line\n in io.open(str(gitignore_path), encoding=ENC).read()\n )\n if not existing: # pragma: no cover\n with io.open(str(gitignore_path), \"a+\", encoding=ENC) as f:\n f.writelines([comment, ignore_line, \"\\n\"])\n\n if django: # pragma: no cover\n dj_module, loaded_from = get_module({}, django)\n dj_filename = dj_module.__file__\n if Path(dj_filename).exists():\n click.confirm(\n \"{} is found do you want to add dynaconf?\".format(dj_filename),\n abort=True,\n )\n with open(dj_filename, \"a\") as dj_file:\n dj_file.write(constants.DJANGO_PATCH)\n else:\n click.echo(\"Django settings file not written.\")",
"def setup_example():\n ''' Add commands for testing, etc. '''\n parser = argparse.ArgumentParser(\n description='Creates a simple default project (stroop) in the current\\\n directory with the necessary psiTurk files.'\n )\n\n # Optional flags\n parser.add_argument(\n '-v', '--version', help='Print version number.', action=\"store_true\"\n )\n args = parser.parse_args()\n\n # If requested version just print and quite\n if args.version:\n print version_number\n else:\n import psiturk.setup_example as se\n se.setup_example()",
"def load_environment(global_conf, app_conf):\n \"\"\"Configure the Pylons environment via the ``pylons.config``\n object\n \"\"\"\n # Pylons paths\n root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n paths = dict(root=root,\n controllers=os.path.join(root, 'controllers'),\n static_files=os.path.join(root, 'public'),\n templates=[os.path.join(root, 'templates')])\n\n # Initialize config with the basic options\n config.init_app(global_conf, app_conf, package='studio', paths=paths)\n\n # Defines custom config parameters\n config['resources_dir'] = os.path.join(root, 'resources')\n # path to mapserver dir containing default fonts and symbols\n config['mapserver_dir'] = os.path.join(config['resources_dir'], 'mapserver')\n # path to default directory datastore\n config['default_datastore_dir'] = os.path.join(config['resources_dir'], 'default_datastore')\n # path to the template including the <script> tags\n config['js_tmpl'] = os.path.join(paths['templates'][0], 'index.html')\n\n # Convert the debug variable from the config to a boolean value\n config['debug'] = asbool(config['debug'])\n\n config['routes.map'] = make_map()\n config['pylons.app_globals'] = app_globals.Globals()\n config['pylons.h'] = studio.lib.helpers\n\n # Create the Mako TemplateLookup, with the default auto-escaping\n config['pylons.app_globals'].mako_lookup = TemplateLookup(\n directories=paths['templates'],\n error_handler=handle_mako_error,\n module_directory=os.path.join(app_conf['cache_dir'], 'templates'),\n input_encoding='utf-8', output_encoding='utf-8',\n imports=['from webhelpers.html import escape'],\n default_filters=['escape'])\n \n # Setup SQLAlchemy database engine\n engine = engine_from_config(config, 'sqlalchemy.')\n init_model(engine)",
"def setup_example():\n ''' Setup example '''\n if os.path.exists(EXAMPLE_TARGET):\n print \"Error, `psiturk-example` directory already exists. Please \\\n remove it then re-run the command.\"\n else:\n print \"Creating new folder `psiturk-example` in the current working \\\n directory\"\n os.mkdir(EXAMPLE_TARGET)\n print \"Copying\", EXAMPLE_DIR, \"to\", EXAMPLE_TARGET\n dir_util.copy_tree(EXAMPLE_DIR, EXAMPLE_TARGET)\n # change to target director\n print \"Creating default configuration file (config.txt)\"\n file_util.copy_file(DEFAULT_CONFIG_FILE, CONFIG_TARGET)\n os.chdir(EXAMPLE_TARGET)\n os.rename('custom.py.txt', 'custom.py')\n\n if not os.path.exists(GLOBAL_CONFIG_FILE):\n print \"The following config file does not exist:\\n{}\\\n \\nCreating default config file at that \\\n location.\".format(GLOBAL_CONFIG_FILE)\n file_util.copy_file(DEFAULT_GLOBAL_CONFIG_FILE, GLOBAL_CONFIG_FILE)",
"def configure():\n '''\n Configure the transfer environment and store\n '''\n completer = Completer()\n readline.set_completer_delims('\\t')\n readline.parse_and_bind('tab: complete')\n readline.set_completer(completer.path_completer)\n\n home = os.path.expanduser('~')\n if os.path.isfile(os.path.join(home, '.transfer', 'config.yaml')):\n with open(os.path.join(home, '.transfer', 'config.yaml'), 'r') as fp:\n config = yaml.load(fp.read())\n else:\n config = []\n\n project_name = input('Name your project: ')\n existing_project = None\n for project in config:\n if project_name == project['name']:\n existing_project = project_name\n if existing_project is not None:\n print(colored('Project ' + project_name + ' already exists', 'red'))\n overwrite = str_input('Would you like to overwrite this project? (yes or no) ', ['yes', 'no'])\n if overwrite == 'no':\n return\n else:\n config = [project for project in config if project_name != project['name']]\n\n image_path = os.path.expanduser(input('Select parent directory for your images: '))\n path_unset = True\n while path_unset:\n project_path = os.path.expanduser(input('Select destination for your project: '))\n if (project_path.find(image_path) == 0):\n print('Project destination should not be same or within image directory!')\n else:\n path_unset = False\n\n print('Select architecture:')\n print('[0] resnet50')\n print('[1] xception')\n print('[2] inception_v3')\n architecture = int_input('choice', 0, 2, show_range = False)\n if architecture == 0:\n arch = 'resnet50'\n img_dim = 224\n conv_dim = 7\n final_cutoff = 80\n elif architecture == 1:\n arch = 'xception'\n img_dim = 299\n conv_dim = 10\n final_cutoff = 80\n else:\n arch = 'inception_v3'\n img_dim = 299\n conv_dim = 8\n final_cutoff = 80\n api_port = int_input('port for local prediction API (suggested: 5000)', 1024, 49151)\n kfold = int_input('number of folds to use (suggested: 5)', 3, 10)\n kfold_every = bool_input('Fit a model for every fold? (if false, just fit one)')\n print('Warning: if working on a remote computer, you may not be able to plot!')\n plot_cm = bool_input('Plot a confusion matrix after training?')\n batch_size = int_input('batch size (suggested: 8)', 1, 64)\n learning_rate = float_input('learning rate (suggested: 0.001)', 0, 1)\n learning_rate_decay = float_input('learning decay rate (suggested: 0.000001)', 0, 1)\n cycle = int_input('number of cycles before resetting the learning rate (suggested: 3)', 1, 10)\n num_rounds = int_input('number of rounds (suggested: 3)', 1, 100)\n print('Select image resolution:')\n print('[0] low (' + str(img_dim) + ' px)')\n print('[1] mid (' + str(img_dim * 2) + ' px)')\n print('[2] high (' + str(img_dim * 4) + ' px)')\n img_resolution_index = int_input('choice', 0, 2, show_range = False)\n if img_resolution_index == 0:\n img_size = 1\n elif img_resolution_index == 1:\n img_size = 2\n else:\n img_size = 4\n use_augmentation = str_input('Would you like to add image augmentation? 
(yes or no) ', ['yes', 'no'])\n if use_augmentation == 'yes':\n augmentations = select_augmentations()\n else:\n augmentations = None\n\n project = {'name': project_name,\n 'img_path': image_path,\n 'path': project_path,\n 'plot': plot_cm,\n 'api_port': api_port,\n 'kfold': kfold,\n 'kfold_every': kfold_every,\n 'cycle': cycle,\n 'seed': np.random.randint(9999),\n 'batch_size': batch_size,\n 'learning_rate': learning_rate,\n 'learning_rate_decay': learning_rate_decay,\n 'final_cutoff': final_cutoff,\n 'rounds': num_rounds,\n 'img_size': img_size,\n 'augmentations': augmentations,\n 'architecture': arch,\n 'img_dim': img_dim,\n 'conv_dim': conv_dim,\n 'is_split': False,\n 'is_array': False,\n 'is_augmented': False,\n 'is_pre_model': False,\n 'is_final': False,\n 'model_round': 0,\n 'server_weights': None,\n 'last_weights': None,\n 'best_weights': None}\n\n config.append(project)\n store_config(config)\n print('')\n print(colored('Project configure saved!', 'cyan'))\n print('')\n print('To run project:')\n print('')\n print(colored(' transfer --run --project ' + project_name, 'green'))\n print('or')\n print(colored(' transfer -r -p ' + project_name, 'green'))",
"def new_env(environment):\n \"\"\" Create a new environment in the configuration and ask the\n user for the commands for this specific environment.\n \"\"\"\n if not environment:\n print(\"You need to supply an environment name\")\n return\n\n parser = read_config()\n\n if environment in parser.sections():\n print(\"Environment '%s' already exists\" % environment)\n return\n\n print(\"Please introduce (in order) the commands for '%s'\\n\" % environment)\n print(\"Press RETURN to end command and RETURN with empty line to finish\\n\")\n\n commands = []\n cmd = \"\"\n\n while True:\n try:\n cmd = raw_input(\"> \")\n\n if not cmd:\n break\n\n commands.append(cmd)\n\n except KeyboardInterrupt:\n return\n\n parser.add_section(environment)\n parser.set(environment, \"cmd\", \"\\n\".join(commands))\n\n write_config(parser)\n\n print(\"Added environment '%s'\" % environment)",
"public static void configureExample(String host, int port, String user, String password, Authentication authType)\n throws IOException, ResourceNotFoundException, ForbiddenUserException, FailedRequestException, ResourceNotResendableException\n {\n // create the client\n DatabaseClient client = DatabaseClientFactory.newClient(host, port, user, password, authType);\n\n installResourceExtension(client);\n\n configureQueryOptions(client);\n\n setUpExample(client);\n\n // release the client\n client.release();\n }"
] |
[
0.753597617149353,
0.747139573097229,
0.7193689942359924,
0.7176499366760254,
0.7167496085166931,
0.7145485877990723,
0.7098968029022217,
0.7098963260650635,
0.7090998888015747,
0.7090483903884888,
0.7075063586235046,
0.7068800330162048
] |
Commands for devops operations
|
def start(ctx, debug, version, config):
"""Commands for devops operations"""
ctx.obj = {}
ctx.DEBUG = debug
if os.path.isfile(config):
with open(config) as fp:
agile = json.load(fp)
else:
agile = {}
ctx.obj['agile'] = agile
if version:
click.echo(__version__)
ctx.exit(0)
if not ctx.invoked_subcommand:
click.echo(ctx.get_help())
|
[
"def cmd_devop(self, args):\n '''device operations'''\n usage = \"Usage: devop <read|write> <spi|i2c> name bus address\"\n if len(args) < 5:\n print(usage)\n return\n\n if args[1] == 'spi':\n bustype = mavutil.mavlink.DEVICE_OP_BUSTYPE_SPI\n elif args[1] == 'i2c':\n bustype = mavutil.mavlink.DEVICE_OP_BUSTYPE_I2C\n else:\n print(usage)\n\n if args[0] == 'read':\n self.devop_read(args[2:], bustype)\n elif args[0] == 'write':\n self.devop_write(args[2:], bustype)\n else:\n print(usage)",
"function exec() {\n\n var actions = {\n commands: {\n 'list-dependencies': { action: _listDependencies },\n 'list-devdependencies': { action: _listDevDependencies },\n 'list-peerdependencies': { action: _listPeerDependencies },\n 'list-optdependencies': { action: _listOptDependencies },\n 'list-alldependencies': { action: _listAllDependencies },\n 'set-node-engine': { action: _setNodeEngine },\n 'sort-dependencies': { action: _sortDependencies },\n 'sort-devdependencies': { action: _sortDevDependencies },\n 'sort-peerdependencies': { action: _sortPeerDependencies },\n 'sort-optdependencies': { action: _sortOptDependencies },\n 'sort-alldependencies': { action: _sortAllDependencies },\n 'traverse-dependencies': { action: _traverseDependencies },\n 'upgrade-version-patch': { action: _upgradeVersionPatch },\n 'upgrade-version-minor': { action: _upgradeVersionMinor },\n 'upgrade-version-major': { action: _upgradeVersionMajor },\n 'upgrade-version': { action: _upgradeVersionPatch },\n 'upgrade-dependencies': { action: _upgradeDependencies },\n }\n };\n\n cli.command(__dirname, actions);\n}",
"def deb(options):\n \"\"\"\n Creates debian packages.\n\n Example uses:\n paver deb\n paver deb -k 12345\n paver deb -k 12345 -p geonode/testing\n \"\"\"\n key = options.get('key', None)\n ppa = options.get('ppa', None)\n\n version, simple_version = versions()\n\n info('Creating package for GeoNode version %s' % version)\n\n with pushd('package'):\n # Get rid of any uncommitted changes to debian/changelog\n info('Getting rid of any uncommitted changes in debian/changelog')\n sh('git checkout debian/changelog')\n\n # Workaround for git-dch bug\n # http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=594580\n path('.git').makedirs()\n\n # Install requirements\n #sh('sudo apt-get -y install debhelper devscripts git-buildpackage')\n\n sh(('git-dch --spawn-editor=snapshot --git-author --new-version=%s'\n ' --id-length=6 --ignore-branch --release' % (\n simple_version)))\n\n deb_changelog = path('debian') / 'changelog'\n for line in fileinput.input([deb_changelog], inplace = True):\n print line.replace(\"urgency=low\", \"urgency=high\"),\n\n ## Revert workaround for git-dhc bug\n path('.git').rmtree()\n\n if key is None and ppa is None:\n # A local installable package\n sh('debuild -uc -us -A')\n elif key is None and ppa is not None:\n # A sources package, signed by daemon\n sh('debuild -S')\n elif key is not None and ppa is None:\n # A signed installable package\n sh('debuild -k%s -A' % key)\n elif key is not None and ppa is not None:\n # A signed, source package\n sh('debuild -k%s -S' % key)\n\n if ppa is not None:\n sh('dput ppa:%s geonode_%s_source.changes' % (ppa, simple_version))",
"def cli(env, **kwargs):\n \"\"\"host order options for a given dedicated host.\n\n To get a list of available backend routers see example:\n slcli dh create-options --datacenter dal05 --flavor 56_CORES_X_242_RAM_X_1_4_TB\n \"\"\"\n\n mgr = SoftLayer.DedicatedHostManager(env.client)\n tables = []\n\n if not kwargs['flavor'] and not kwargs['datacenter']:\n options = mgr.get_create_options()\n\n # Datacenters\n dc_table = formatting.Table(['datacenter', 'value'])\n dc_table.sortby = 'value'\n for location in options['locations']:\n dc_table.add_row([location['name'], location['key']])\n tables.append(dc_table)\n\n dh_table = formatting.Table(['Dedicated Virtual Host Flavor(s)', 'value'])\n dh_table.sortby = 'value'\n for item in options['dedicated_host']:\n dh_table.add_row([item['name'], item['key']])\n tables.append(dh_table)\n else:\n if kwargs['flavor'] is None or kwargs['datacenter'] is None:\n raise exceptions.ArgumentError('Both a flavor and datacenter need '\n 'to be passed as arguments '\n 'ex. slcli dh create-options -d '\n 'ams01 -f '\n '56_CORES_X_242_RAM_X_1_4_TB')\n router_opt = mgr.get_router_options(kwargs['datacenter'], kwargs['flavor'])\n br_table = formatting.Table(\n ['Available Backend Routers'])\n for router in router_opt:\n br_table.add_row([router['hostname']])\n tables.append(br_table)\n\n env.fout(formatting.listing(tables, separator='\\n'))",
"private function nodeJsCommand()\n {\n\n // todo check nodejs exists in windows os\n return true;\n $found = false;\n foreach (['node', 'nodejs'] as $command) {\n $this->process->setCommandLine('which ' . $command);\n $this->process->setTimeout(null);\n $this->process->run();\n\n if ($this->process->isSuccessful()) {\n $found = true;\n break;\n }\n }\n\n if (!$found) {\n $this->console->error('node.js was not found');\n $this->errors = true;\n }\n }",
"def cmd_devid(args):\n '''show parameters'''\n params = mestate.mlog.params\n k = sorted(params.keys())\n for p in k:\n if p.startswith('COMPASS_DEV_ID'):\n mp_util.decode_devid(params[p], p)\n if p.startswith('INS_') and p.endswith('_ID'):\n mp_util.decode_devid(params[p], p)",
"function command(base, actions, opts) {\n\n actions = actions || {};\n opts = opts || {};\n\n var commands = JSON.parse(fs.readFileSync(p.join(base, opts.commandFile || '../conf/commands.json'))),\n pkg = JSON.parse(fs.readFileSync(p.join(base, '../package.json')));\n\n if (actions.commands && commands.commands) {\n _.each(actions.commands, function (command, name) {\n if (commands.commands[name]) {\n commands.commands[name].action = command.action;\n }\n });\n }\n\n commander.version(pkg.version);\n\n if (commands.options) {\n _.each(commands.options, function (option) {\n commander.option(option.arg, option.desc, option.action);\n });\n }\n\n _.each(commands.commands, function (command, name) {\n var program = commander\n .command(name)\n .description(command.desc);\n\n _.each(command.options, function (option) {\n program.option(option.arg, option.desc, option.action);\n });\n\n program.action(command.action);\n });\n\n _preCommand(commands.commands);\n\n commander.parse(process.argv);\n\n // NOTE: commander.args is populated by commander#parse,\n // hence _postCommand relies on commander#parse finishing without exiting or throwing error,\n // otherwise _postCommand won't be executed\n _postCommand(commander.args, commands.commands, commands.options);\n}",
"def commands\n validate_arguments!\n\n ruby_cmd = Heroku::Command.commands.inject({}) {|h, (cmd, command)| h[cmd] = command_to_hash('ruby', cmd, command) ; h}\n commands = Heroku::JSPlugin.commands_info['commands']\n node_cmd = command_list_to_hash(commands.select {|command| command['plugin'] != ''}, 'node')\n go_cmd = command_list_to_hash(commands.select {|command| command['plugin'] == ''}, 'go')\n\n all_cmd = {}\n all_cmd.merge!(ruby_cmd)\n all_cmd.merge!(node_cmd)\n all_cmd.merge!(go_cmd)\n\n sorted_cmd = all_cmd.sort { |a,b| a[0] <=> b[0] }.map{|cmd| cmd[1]}\n\n attrs = [:command, :type, :plugin]\n header = attrs.map{|attr| attr.to_s.capitalize}\n\n count_attrs = [:type, :count]\n count_header = count_attrs.map{|attr| attr.to_s.capitalize}\n\n counts = all_cmd.inject(Hash.new(0)) {|h, (_, cmd)| h[cmd[:type]] += 1; h}\n type_and_percentage = counts.keys.sort.map{|type| {:type => type, :count => counts[type]}}\n\n if options[:csv]\n csv_str = CSV.generate do |csv|\n csv << header\n sorted_cmd.each {|cmd| csv << attrs.map{|attr| cmd[attr]}}\n\n csv << []\n csv << count_header\n type_and_percentage.each {|type| csv << count_attrs.map{|attr| type[attr]}}\n end\n display(csv_str)\n else\n display_table(sorted_cmd, attrs, header)\n display(\"\")\n display_table(type_and_percentage, count_attrs, count_header)\n end\n end",
"function (command, options) {\n var exec = require('child_process').exec;\n var defer = Q.defer();\n\n //Prepare the options object to be valid\n options = prepareOptions(options);\n\n //Activate-Deactivate command logging execution\n printCommandExecution(command, options);\n\n exec('git ' + prepareCommand(command), options, function (err, stdout, stderr) {\n //Activate-deactivate err and out logging\n printCommandResponse({err:err, stdout:stdout, stderr:stderr});\n\n if (err) {\n defer.reject({err: err, stderr: stderr});\n } else {\n defer.resolve({res:stdout, out:stderr});\n }\n });\n\n return defer.promise;\n }",
"function exec() {\n\n // NOTE: pardon this cli target to Couchpenter methods mapping,\n // needed to preserve backward compatibility w/ v0.1.x\n const FUNCTIONS = {\n setup: 'setUp',\n 'setup-db': 'setUpDatabases',\n 'setup-doc': 'setUpDocuments',\n 'setup-doc-overwrite': 'setUpDocumentsOverwrite',\n teardown: 'tearDown',\n 'teardown-db': 'tearDownDatabases',\n 'teardown-doc': 'tearDownDocuments',\n reset: 'reset',\n 'reset-db': 'resetDatabases',\n 'reset-doc': 'resetDocuments',\n clean: 'clean',\n 'clean-db': 'cleanDatabases',\n 'warm-view': 'warmViews',\n 'live-deploy-view': 'liveDeployView'\n };\n\n var actions = {\n commands: {\n init: { action: _init }\n }\n };\n\n _.keys(FUNCTIONS).forEach(function (task) {\n actions.commands[task] = { action: _task(FUNCTIONS[task]) };\n });\n\n cli.command(__dirname, actions);\n}",
"def devserver_cmd(argv=sys.argv[1:]): # pragma: no cover\n \"\"\"\\\nServe the web API for development.\n\nUsage:\n pld-devserver [options]\n\nOptions:\n -h --help Show this screen.\n\n --host=<host> The host to use [default: 0.0.0.0].\n\n --port=<port> The port to use [default: 5000].\n\n --debug=<debug> Whether or not to use debug mode [default: 0].\n\"\"\"\n arguments = docopt(devserver_cmd.__doc__, argv=argv)\n initialize_config()\n app.run(\n host=arguments['--host'],\n port=int(arguments['--port']),\n debug=int(arguments['--debug']),\n )",
"function devserver(yargs) {\n var requirements = pathRequiredUsage(\n 'Path to a staticbuild.json file or directory to find one.\\n' +\n ' If no path is supplied, the current directory is used.');\n \n yargs = configureYargs(yargs)\n .usage(commandUsage('dev', '[options] <path>' + requirements, \n 'Development server.\\n\\n' + \n ' Runs a local http server to dynamically render static content ' + \n 'during development.'))\n .option('b', {\n alias: 'bundling',\n description: 'Enable bundling.',\n type: 'boolean',\n // jscs: disable\n 'default': false\n // jscs: enable\n })\n .option('r', {\n alias: 'restart',\n description: 'Number of seconds to delay nodemon restarts. -1 to disable nodemon.',\n type: 'number',\n // jscs: disable\n 'default': 1\n // jscs: enable\n })\n .help('h', 'Show help.').alias('h', 'help');\n yargs = configureVerbosity(yargs);\n\n var args = processYargs(yargs, 'dev');\n parseDevserverRestart(args);\n pathRequired(args, 1002);\n return args;\n}"
] |
[
0.7714719176292419,
0.7381599545478821,
0.7163476943969727,
0.687071681022644,
0.6861153841018677,
0.6814738512039185,
0.6775156259536743,
0.6766917705535889,
0.6749687790870667,
0.6739022135734558,
0.6731467247009277,
0.6728531122207642
] |
Duplicate all related objects of obj, setting
field to value. If one of the duplicated
objects has an FK to another duplicated object,
update that as well. Return the duplicate copy
of obj.
duplicate_order is a list of models which specifies
the order in which the duplicate objects are saved.
For complex objects this can matter. Check to see
whether objects are being saved correctly and, if not,
pass in the related objects in the order in which they should be saved.
|
def duplicate(obj, value=None, field=None, duplicate_order=None):
"""
    Duplicate all related objects of obj, setting
    field to value. If one of the duplicated
    objects has an FK to another duplicated object,
    update that as well. Return the duplicate copy
    of obj.
    duplicate_order is a list of models which specifies
    the order in which the duplicate objects are saved.
    For complex objects this can matter. Check to see
    whether objects are being saved correctly and, if not,
    pass in the related objects in the order in which they should be saved.
"""
using = router.db_for_write(obj._meta.model)
collector = CloneCollector(using=using)
collector.collect([obj])
collector.sort()
related_models = list(collector.data.keys())
data_snapshot = {}
for key in collector.data.keys():
data_snapshot.update({
key: dict(zip(
[item.pk for item in collector.data[key]], [item for item in collector.data[key]]))
})
root_obj = None
# Sometimes it's good enough just to save in reverse deletion order.
if duplicate_order is None:
duplicate_order = reversed(related_models)
for model in duplicate_order:
# Find all FKs on model that point to a related_model.
fks = []
for f in model._meta.fields:
if isinstance(f, ForeignKey) and f.rel.to in related_models:
fks.append(f)
# Replace each `sub_obj` with a duplicate.
if model not in collector.data:
continue
sub_objects = collector.data[model]
for obj in sub_objects:
for fk in fks:
fk_value = getattr(obj, "%s_id" % fk.name)
# If this FK has been duplicated then point to the duplicate.
fk_rel_to = data_snapshot[fk.rel.to]
if fk_value in fk_rel_to:
dupe_obj = fk_rel_to[fk_value]
setattr(obj, fk.name, dupe_obj)
# Duplicate the object and save it.
obj.id = None
if field is not None:
setattr(obj, field, value)
obj.save()
if root_obj is None:
root_obj = obj
return root_obj
|
[
"def _update_related(self, obj, related, subfield_dict):\n \"\"\"\n update DB objects related to a base object\n obj: a base object to create related\n related: dict mapping field names to lists of related objects\n subfield_list: where to get the next layer of subfields\n \"\"\"\n # keep track of whether or not anything was updated\n updated = False\n\n # for each related field - check if there are differences\n for field, items in related.items():\n # skip subitem check if it's locked anyway\n if field in obj.locked_fields:\n continue\n\n # get items from database\n dbitems = list(getattr(obj, field).all())\n dbitems_count = len(dbitems)\n\n # default to doing nothing\n do_delete = do_update = False\n\n if items and dbitems_count: # we have items, so does db, check for conflict\n do_delete = do_update = items_differ(items, dbitems, subfield_dict[field][2])\n elif items and not dbitems_count: # we have items, db doesn't, just update\n do_update = True\n elif not items and dbitems_count: # db has items, we don't, just delete\n do_delete = True\n # otherwise: no items or dbitems, so nothing is done\n\n # don't delete if field is in merge_related\n if field in self.merge_related:\n new_items = []\n # build a list of keyfields to existing database objects\n keylist = self.merge_related[field]\n keyed_dbitems = {tuple(getattr(item, k) for k in keylist):\n item for item in dbitems}\n\n # go through 'new' items\n # if item with the same keyfields exists:\n # update the database item w/ the new item's properties\n # else:\n # add it to new_items\n for item in items:\n key = tuple(item.get(k) for k in keylist)\n dbitem = keyed_dbitems.get(key)\n if not dbitem:\n new_items.append(item)\n else:\n # update dbitem\n for fname, val in item.items():\n setattr(dbitem, fname, val)\n dbitem.save()\n\n # import anything that made it to new_items in the usual fashion\n self._create_related(obj, {field: new_items}, subfield_dict)\n else:\n # default logic is to just wipe and recreate subobjects\n if do_delete:\n updated = True\n getattr(obj, field).all().delete()\n if do_update:\n updated = True\n self._create_related(obj, {field: items}, subfield_dict)\n\n return updated",
"def reorder(*args)\n return self if args.blank?\n\n relation = clone\n relation.reordering_value = true\n relation.order_values = args.flatten\n relation\n end",
"def deduplicate(list_object):\n \"\"\"Rebuild `list_object` removing duplicated and keeping order\"\"\"\n new = []\n for item in list_object:\n if item not in new:\n new.append(item)\n return new",
"def duplicate_statements(model, oldorigin, neworigin, rfilter=None):\n '''\n Take links with a given origin, and create duplicate links with the same information but a new origin\n\n :param model: Versa model to be updated\n :param oldres: resource IRI to be duplicated\n :param newres: origin resource IRI for duplication\n :return: None\n '''\n for o, r, t, a in model.match(oldorigin):\n if rfilter is None or rfilter(o, r, t, a):\n model.add(I(neworigin), r, t, a)\n return",
"def duplicate_object_hook(ordered_pairs):\n \"\"\"Make lists out of duplicate keys.\"\"\"\n json_dict = {}\n for key, val in ordered_pairs:\n existing_val = json_dict.get(key)\n if not existing_val:\n json_dict[key] = val\n else:\n if isinstance(existing_val, list):\n existing_val.append(val)\n else:\n json_dict[key] = [existing_val, val]\n\n return json_dict",
"def create_order_objects(model, order_fields):\n \"\"\"\n Create order items for objects already present in the database.\n \"\"\"\n for rel in model._meta.get_all_related_objects():\n rel_model = rel.model\n if rel_model.__module__ == 'order.models':\n\n objs = model.objects.all()\n values = {}\n for order_field in order_fields:\n order_objs = rel_model.objects.all().order_by('-%s' \\\n % order_field)\n try:\n values[order_field] = getattr(order_objs[0], \\\n order_field) + 1\n except IndexError:\n values[order_field] = 1\n for obj in objs:\n try:\n rel_model.objects.get(item=obj)\n except rel_model.DoesNotExist:\n rel_model.objects.create(item=obj, **values)\n for key in values:\n values[key] += 1",
"def uniquify(model):\n '''\n Remove all duplicate relationships\n '''\n seen = set()\n to_remove = set()\n for ix, (o, r, t, a) in model:\n hashable_link = (o, r, t) + tuple(sorted(a.items()))\n #print(hashable_link)\n if hashable_link in seen:\n to_remove.add(ix)\n seen.add(hashable_link)\n\n model.remove(to_remove)\n return",
"def process_post_many_to_many_field(existing_post, field, related_objects):\n \"\"\"\n Sync data for a many-to-many field related to a post using set differences.\n\n :param existing_post: Post object that needs to be sync'd\n :param field: the many-to-many field to update\n :param related_objects: the list of objects for the field, that need to be sync'd to the Post\n :return: None\n \"\"\"\n to_add = set(related_objects.get(existing_post.wp_id, set())) - set(getattr(existing_post, field).all())\n to_remove = set(getattr(existing_post, field).all()) - set(related_objects.get(existing_post.wp_id, set()))\n\n if to_add:\n getattr(existing_post, field).add(*to_add)\n if to_remove:\n getattr(existing_post, field).remove(*to_remove)",
"def _sort_by_unique_fields(model, model_objs, unique_fields):\n \"\"\"\n Sort a list of models by their unique fields.\n\n Sorting models in an upsert greatly reduces the chances of deadlock\n when doing concurrent upserts\n \"\"\"\n unique_fields = [\n field for field in model._meta.fields\n if field.attname in unique_fields\n ]\n\n def sort_key(model_obj):\n return tuple(\n field.get_db_prep_save(getattr(model_obj, field.attname),\n connection)\n for field in unique_fields\n )\n return sorted(model_objs, key=sort_key)",
"def _create_related(self, obj, related, subfield_dict):\n \"\"\"\n create DB objects related to a base object\n obj: a base object to create related\n related: dict mapping field names to lists of related objects\n subfield_list: where to get the next layer of subfields\n \"\"\"\n for field, items in related.items():\n subobjects = []\n all_subrelated = []\n Subtype, reverse_id_field, subsubdict = subfield_dict[field]\n for order, item in enumerate(items):\n # pull off 'subrelated' (things that are related to this obj)\n subrelated = {}\n for subfield in subsubdict:\n subrelated[subfield] = item.pop(subfield)\n\n if field in self.preserve_order:\n item['order'] = order\n\n item[reverse_id_field] = obj.id\n\n try:\n subobjects.append(Subtype(**item))\n all_subrelated.append(subrelated)\n except Exception as e:\n raise DataImportError('{} while importing {} as {}'.format(e, item, Subtype))\n\n # add all subobjects at once (really great for actions & votes)\n try:\n Subtype.objects.bulk_create(subobjects)\n except Exception as e:\n raise DataImportError('{} while importing {} as {}'.format(e, subobjects, Subtype))\n\n # after import the subobjects, import their subsubobjects\n for subobj, subrel in zip(subobjects, all_subrelated):\n self._create_related(subobj, subrel, subsubdict)",
"def sanitize_order(model):\n \"\"\"\n Sanitize order values so eliminate conflicts and gaps.\n XXX: Early start, very ugly, needs work.\n \"\"\"\n to_order_dict = {}\n\n order_field_names = []\n for field in model._meta.fields:\n if isinstance(field, models.IntegerField):\n order_field_names.append(field.name)\n\n for field_name in order_field_names:\n to_order_dict[field_name] = list(model.objects.all().order_by(\\\n field_name, '-timestamp'))\n\n updates = {}\n for field_name, object_list in to_order_dict.items():\n for i, obj in enumerate(object_list):\n position = i + 1\n if getattr(obj, field_name) != position:\n if obj in updates:\n updates[obj][field_name] = position\n else:\n updates[obj] = {field_name: position}\n\n for obj, fields in updates.items():\n for field, value in fields.items():\n setattr(obj, field, value)\n obj.save()",
"def set_relation_many_to_many(self, obj, field_name, objs):\n \"Set a many-to-many field on an object\"\n relation = getattr(obj, field_name)\n if hasattr(relation, 'set'):\n relation.set(objs) # Django 2.x\n else:\n setattr(obj, field_name, objs)"
] |
[
0.6876682043075562,
0.6828989386558533,
0.6736040711402893,
0.6582846641540527,
0.6567643284797668,
0.6564415693283081,
0.656254768371582,
0.6561061143875122,
0.6523545980453491,
0.650313138961792,
0.6493257880210876,
0.6484439373016357
] |
Function getPayloadStruct
Get the payload structure used for a creation or a modification
@param attributes: The data
@param objType: SubItem type (e.g. hostgroup for hostgroup_class)
@return RETURN: the payload
|
def getPayloadStruct(self, attributes, objType):
""" Function getPayloadStruct
        Get the payload structure used for a creation or a modification
        @param attributes: The data
        @param objType: SubItem type (e.g. hostgroup for hostgroup_class)
        @return RETURN: the payload
"""
payload = {self.payloadObj: attributes,
objType + "_class":
{self.payloadObj: attributes}}
return payload
|
[
"def getPayloadStruct(self, attributes, objType=None):\n \"\"\" Function getPayloadStruct\n Get the payload structure to do a creation or a modification\n\n @param key: The key to modify\n @param attribute: The data\n @param objType: NOT USED in this class\n @return RETURN: The API result\n \"\"\"\n if self.setInParentPayload:\n return {self.parentPayloadObject:\n {self.payloadObj: attributes}}\n else:\n return {self.payloadObj: attributes}",
"def getPayloadStruct(self, payload):\n \"\"\" Function getPayloadStruct\n\n @param payload: The payload structure to the object to add\n @return RETURN: A dict\n \"\"\"\n newSubItem = self.objType(self.api, 0, self.parentObjName,\n self.parentPayloadObj, self.parentKey, {})\n return newSubItem.getPayloadStruct(payload, self.parentPayloadObj)",
"def decode_struct(self, data_type, obj):\n \"\"\"\n The data_type argument must be a Struct.\n See json_compat_obj_decode() for argument descriptions.\n \"\"\"\n if obj is None and data_type.has_default():\n return data_type.get_default()\n elif not isinstance(obj, dict):\n raise bv.ValidationError('expected object, got %s' %\n bv.generic_type_name(obj))\n all_fields = data_type.definition._all_fields_\n for extra_permission in self.caller_permissions.permissions:\n all_extra_fields = '_all_{}_fields_'.format(extra_permission)\n all_fields = all_fields + getattr(data_type.definition, all_extra_fields, [])\n\n if self.strict:\n all_field_names = data_type.definition._all_field_names_\n for extra_permission in self.caller_permissions.permissions:\n all_extra_field_names = '_all_{}_field_names_'.format(extra_permission)\n all_field_names = all_field_names.union(\n getattr(data_type.definition, all_extra_field_names, {}))\n\n for key in obj:\n if (key not in all_field_names and\n not key.startswith('.tag')):\n raise bv.ValidationError(\"unknown field '%s'\" % key)\n ins = data_type.definition()\n self.decode_struct_fields(ins, all_fields, obj)\n # Check that all required fields have been set.\n data_type.validate_fields_only_with_permissions(ins, self.caller_permissions)\n return ins",
"def handle_create_payload(\n entity: BaseEntity,\n author_user: UserType,\n protocol_name: str,\n to_user_key: RsaKey = None,\n parent_user: UserType = None,\n) -> str:\n \"\"\"Create a payload with the given protocol.\n\n Any given user arguments must have ``private_key`` and ``handle`` attributes.\n\n :arg entity: Entity object to send. Can be a base entity or a protocol specific one.\n :arg author_user: User authoring the object.\n :arg protocol_name: Protocol to create payload for.\n :arg to_user_key: Public key of user private payload is being sent to, required for private payloads.\n :arg parent_user: (Optional) User object of the parent object, if there is one. This must be given for the\n Diaspora protocol if a parent object exists, so that a proper ``parent_author_signature`` can\n be generated. If given, the payload will be sent as this user.\n :returns: Built payload message (str)\n \"\"\"\n mappers = importlib.import_module(f\"federation.entities.{protocol_name}.mappers\")\n protocol = importlib.import_module(f\"federation.protocols.{protocol_name}.protocol\")\n protocol = protocol.Protocol()\n outbound_entity = mappers.get_outbound_entity(entity, author_user.private_key)\n if parent_user:\n outbound_entity.sign_with_parent(parent_user.private_key)\n send_as_user = parent_user if parent_user else author_user\n data = protocol.build_send(entity=outbound_entity, from_user=send_as_user, to_user_key=to_user_key)\n return data",
"def create_object(self, obj_type, payload, return_fields=None):\n \"\"\"Create an Infoblox object of type 'obj_type'\n\n Args:\n obj_type (str): Infoblox object type,\n e.g. 'network', 'range', etc.\n payload (dict): Payload with data to send\n return_fields (list): List of fields to be returned\n Returns:\n The object reference of the newly create object\n Raises:\n InfobloxException\n \"\"\"\n self._validate_obj_type_or_die(obj_type)\n\n query_params = self._build_query_params(return_fields=return_fields)\n\n url = self._construct_url(obj_type, query_params)\n opts = self._get_request_options(data=payload)\n self._log_request('post', url, opts)\n if(self.session.cookies):\n # the first 'get' or 'post' action will generate a cookie\n # after that, we don't need to re-authenticate\n self.session.auth = None\n r = self.session.post(url, **opts)\n\n self._validate_authorized(r)\n\n if r.status_code != requests.codes.CREATED:\n response = utils.safe_json_load(r.content)\n already_assigned = 'is assigned to another network view'\n if response and already_assigned in response.get('text'):\n exception = ib_ex.InfobloxMemberAlreadyAssigned\n else:\n exception = ib_ex.InfobloxCannotCreateObject\n raise exception(\n response=response,\n obj_type=obj_type,\n content=r.content,\n args=payload,\n code=r.status_code)\n\n return self._parse_reply(r)",
"def get_payload(self, items):\n \"\"\"Upload given items to given account\n\n data is an iterable of tuples where the first element is a Folder\n instance representing the ParentFolder that the item will be placed in\n and the second element is a Data string returned from an ExportItems\n call.\n \"\"\"\n from .properties import ParentFolderId\n uploaditems = create_element('m:%s' % self.SERVICE_NAME)\n itemselement = create_element('m:Items')\n uploaditems.append(itemselement)\n for parent_folder, data_str in items:\n item = create_element('t:Item', CreateAction='CreateNew')\n parentfolderid = ParentFolderId(parent_folder.id, parent_folder.changekey)\n set_xml_value(item, parentfolderid, version=self.account.version)\n add_xml_child(item, 't:Data', data_str)\n itemselement.append(item)\n return uploaditems",
"private function processSerializeTypeCollectionObject($objectType, $object, array $data, string $attribute): array\n {\n $subData = $object->{$objectType->getter()}();\n if ($this->checkNullForAttribute($subData, $attribute) === true) {\n return $data;\n }\n\n foreach ($subData as $key => $subObject) {\n $data = $this->setArrayAndCheckNullWithKey($data, $subObject, $key, $attribute);\n }\n\n return $data;\n }",
"def get_filled_structure(self, subgroup=None):\n '''\n method in charged of filling an structure containing the object fields\n values taking into account the 'group' attribute from the corresponding\n form object, which is necesary to fill the details form as it is configured\n in the 'group' attribute\n '''\n # initilize the result structure\n result = []\n\n # the object corresponding model content is taken into a dictionary\n object_content = model_to_dict(self.object)\n\n # generallically some common or specific fields are not interesting\n if 'exclude_fields' not in dir(self):\n self.exclude_fields = []\n\n self.exclude_fields.append(\"id\")\n\n for field in (self.exclude_fields):\n if field in object_content.keys():\n object_content.pop(field)\n # following is going to be created an structure with the appropieate caption\n # for every existing field in the current model\n verbose_names = {}\n for field in object_content.keys():\n verbose_names[field] = self.model._meta.get_field(field).verbose_name\n # the found fields in the groups structure are going to be taked into account\n gr_object_content = []\n\n if subgroup:\n group_array = subgroup\n else:\n group_array = self.groups\n\n for group in group_array:\n\n # raise Exception(group)\n item = {}\n\n item[\"name\"] = smart_text(group[0])\n item[\"col\"] = group[1]\n item_elements = group[2:]\n\n sublist = []\n\n idx = 0\n for item_element in item_elements:\n # the element can contains another groups\n if (idx > 1) and (type(item_element) == tuple):\n # Recursive\n sublist.append(self.get_filled_structure([subgroup]))\n else:\n filter_field = None\n # Check if it is a list\n if type(item_element) == list:\n # if it is a list, that means that can be found the\n # corresponding values for colums and any other\n field = item_element[0]\n # take into account that field caption can be passed as\n # third list element\n if len(item_element) >= 3 and item_element[2]:\n verbose_names[field] = _(item_element[2])\n if len(item_element) >= 9:\n filter_field = item_element[8]\n else:\n\n field = item_element\n\n if field not in verbose_names:\n if field.startswith('get_') and field.endswith('_display'):\n label_field = remove_getdisplay(field)\n if self.model:\n try:\n verbose_names[field] = self.model._meta.get_field(label_field).verbose_name\n except FieldDoesNotExist:\n verbose_names[field] = _(label_field)\n else:\n verbose_names[field] = _(label_field)\n else:\n label_field = field\n verbose_names[field] = _(label_field)\n\n args = {}\n\n value = None\n for field_split in field.split('__'):\n if value is None:\n try:\n verbose_names[field] = self.object._meta.get_field(field_split).verbose_name\n except AttributeError:\n pass\n except FieldDoesNotExist:\n pass\n\n value = getattr(self.object, field_split, None)\n else:\n try:\n verbose_names[field] = value._meta.get_field(field_split).verbose_name\n except AttributeError:\n pass\n except FieldDoesNotExist:\n pass\n value = getattr(value, field_split, None)\n\n if callable(value):\n # if 'request' in value.func_code.co_varnames:\n related = (getattr(value, 'all', None) is not None)\n if related:\n value = \", \".join([str(x) for x in value.all()])\n else:\n if 'request' in value.__code__.co_varnames:\n args['request'] = self.request\n # Call the method\n value = value(**args)\n\n sublist.append({\n \"name\": _(verbose_names[field]),\n \"value\": value,\n \"filter\": filter_field,\n })\n gr_object_content.append(field)\n\n # Increment index\n idx += 1\n\n item[\"value\"] = sublist\n 
result.append(item)\n\n for field in object_content.keys():\n item = {}\n if field not in gr_object_content:\n item[\"name\"] = _(verbose_names[field])\n item[\"value\"] = getattr(self.object, field)\n result.append(item)\n\n return result",
"def create_payload(self):\n \"\"\"Remove ``smart_class_parameter_id`` or ``smart_variable_id``\"\"\"\n payload = super(OverrideValue, self).create_payload()\n if hasattr(self, 'smart_class_parameter'):\n del payload['smart_class_parameter_id']\n if hasattr(self, 'smart_variable'):\n del payload['smart_variable_id']\n return payload",
"def json_data(self, name):\n \"\"\"Get a JSON compatible structure for the named attribute\n \"\"\"\n\n # Check the write permission of the context\n # XXX: This should be done on field level by the field manager adapter\n if not self.can_write():\n raise Unauthorized(\"You are not allowed to modify this content\")\n\n # fetch the field by name\n field = api.get_field(self.context, name)\n\n # bail out if we have no field\n if not field:\n return None\n\n fieldmanager = IFieldManager(field)\n return fieldmanager.json_data(self.context)",
"def for_entity(obj, check_support_attachments=False):\n \"\"\"Return attachments on an entity.\"\"\"\n if check_support_attachments and not supports_attachments(obj):\n return []\n\n return getattr(obj, ATTRIBUTE)",
"def _override_payload(self, payload):\n \"\"\"\n This function transforms the payload into a new format using the\n self.override_payload property.\n \"\"\"\n if self.override_payload:\n old_payload = payload\n\n def get_value(data, key):\n try:\n parent_key, nested_key = key.split(\".\", 1)\n return get_value(data.get(parent_key, {}), nested_key)\n except ValueError:\n return data.get(key, key)\n\n def set_values(data):\n for key, value in data.items():\n if isinstance(value, dict):\n set_values(value)\n else:\n data[key] = get_value(old_payload, value)\n\n payload = deepcopy(self.override_payload)\n set_values(payload)\n\n return payload"
] |
[
0.8475438952445984,
0.7575864791870117,
0.6440965533256531,
0.6344138979911804,
0.6316420435905457,
0.6299972534179688,
0.6281882524490356,
0.6253945231437683,
0.6251264214515686,
0.6238875985145569,
0.6235304474830627,
0.6234186887741089
] |
Validate url.
|
def validate_url(value):
""" Validate url. """
if not re.match(VIMEO_URL_RE, value) and not re.match(YOUTUBE_URL_RE, value):
raise ValidationError('Invalid URL - only Youtube, Vimeo can be used.')
|
[
"def _validateurl(self, url):\n \"\"\"assembles the server url\"\"\"\n parsed = urlparse(url)\n path = parsed.path.strip(\"/\")\n if path:\n parts = path.split(\"/\")\n url_types = (\"admin\", \"manager\", \"rest\")\n if any(i in parts for i in url_types):\n while parts.pop() not in url_types:\n next\n elif \"services\" in parts:\n while parts.pop() not in \"services\":\n next\n path = \"/\".join(parts)\n else:\n path = \"arcgis\"\n self._adminUrl = \"%s://%s/%s/admin\" % (parsed.scheme, parsed.netloc, path)\n return \"%s://%s/%s/rest/services\" % (parsed.scheme, parsed.netloc, path)",
"def validate_url(url):\n \"\"\"\n Validates the URL\n :param url:\n :return:\n \"\"\"\n if validators.url(url):\n return url\n elif validators.domain(url):\n return \"http://{}\".format(url)\n return \"\"",
"def checkURL(cls, trust_root, url):\n \"\"\"quick func for validating a url against a trust root. See the\n TrustRoot class if you need more control.\"\"\"\n tr = cls.parse(trust_root)\n return tr is not None and tr.validateURL(url)",
"def validate_url(cls, url):\n \"\"\"Strip and trailing slash to validate a url\n\n :param url: the url address\n :return: the valid url address\n :rtype: string\n \"\"\"\n\n if url is None:\n return None\n\n url = url.strip()\n while url.endswith('/'):\n url = url[:-1]\n return url",
"def validate_url(cls, url: str) -> Optional[Match[str]]:\n \"\"\"Check if the Extractor can handle the given url.\"\"\"\n match = re.match(cls._VALID_URL, url)\n return match",
"def ValidateURL(url, column_name=None, problems=None):\n \"\"\"\n Validates a non-required URL value using IsValidURL():\n - if invalid adds InvalidValue error (if problems accumulator is provided)\n - an empty URL is considered valid and no error or warning is issued.\n \"\"\"\n if IsEmpty(url) or IsValidURL(url):\n return True\n else:\n if problems:\n problems.InvalidValue(column_name, url)\n return False",
"function validateUrl (url) {\n if (!url) {\n return option.some('Url is not specified')\n }\n if (typeof url !== 'string') {\n return option.some('Url should be type of string')\n }\n return option.none\n}",
"def valid_url(url):\n \"\"\"Validate url.\n\n :rtype: str\n :return: url\n\n :param str url: package homepage url.\n \"\"\"\n regex = re.compile(\n r'^(?:http)s?://'\n r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\\.)+'\n r'(?:[A-Z]{2,6}\\.?|[A-Z0-9-]{2,}\\.?))'\n r'(?:/?|[/?]\\S+)$', re.IGNORECASE)\n if not regex.match(url):\n raise argparse.ArgumentTypeError('\"{0}\" is invalid url.'.format(url))\n return url",
"function validateURL(model, done) {\n var url = model.driver_url;\n parseURL(url, {}, function(err, result) {\n // URL parsing errors are just generic `Error` instances\n // so overwrite name so mongodb-js-server will know\n // the message is safe to display.\n if (err) {\n err.name = 'MongoError';\n }\n done(err, result);\n });\n}",
"def validate_url(self, url):\n \"\"\"Validate the :class:`~urllib.parse.ParseResult` object.\n\n This method will make sure the :meth:`~brownant.app.BrownAnt.parse_url`\n could work as expected even meet a unexpected URL string.\n\n :param url: the parsed url.\n :type url: :class:`~urllib.parse.ParseResult`\n \"\"\"\n # fix up the non-ascii path\n url_path = to_bytes_safe(url.path)\n url_path = urllib.parse.quote(url_path, safe=b\"/%\")\n\n # fix up the non-ascii query\n url_query = to_bytes_safe(url.query)\n url_query = urllib.parse.quote(url_query, safe=b\"?=&\")\n\n url = urllib.parse.ParseResult(url.scheme, url.netloc, url_path,\n url.params, url_query, url.fragment)\n\n # validate the components of URL\n has_hostname = url.hostname is not None and len(url.hostname) > 0\n has_http_scheme = url.scheme in (\"http\", \"https\")\n has_path = not len(url.path) or url.path.startswith(\"/\")\n\n if not (has_hostname and has_http_scheme and has_path):\n raise NotSupported(\"invalid url: %s\" % repr(url))\n\n return url",
"protected function validateUrl()\n {\n $parsedUrl = parse_url($this->apiUrl);\n\n return preg_match('#' . $parsedUrl['host'] . '#i', (string)$this->url);\n }",
"def _validate(url):\n \"\"\"Validate a url.\n\n :param str url: Polling URL extracted from response header.\n :raises: ValueError if URL has no scheme or host.\n \"\"\"\n if url is None:\n return\n parsed = urlparse(url)\n if not parsed.scheme or not parsed.netloc:\n raise ValueError(\"Invalid URL header\")"
] |
[
0.7814081311225891,
0.772642970085144,
0.7723298668861389,
0.760263204574585,
0.7552741169929504,
0.7532185912132263,
0.7522064447402954,
0.7444924712181091,
0.7414736151695251,
0.7404168248176575,
0.7383787035942078,
0.7377429604530334
] |
Enters transaction management for a running thread. It must be balanced
with the appropriate leave_transaction_management call, since the actual
state is managed as a stack.
The state and dirty flag are carried over from the surrounding block or
from the settings, if there is no surrounding block (dirty is always false
when no current block is running).
|
def enter_transaction_management(using=None):
"""
Enters transaction management for a running thread. It must be balanced
with the appropriate leave_transaction_management call, since the actual
state is managed as a stack.
The state and dirty flag are carried over from the surrounding block or
from the settings, if there is no surrounding block (dirty is always false
when no current block is running).
"""
if using is None:
for using in tldap.backend.connections:
connection = tldap.backend.connections[using]
connection.enter_transaction_management()
return
connection = tldap.backend.connections[using]
connection.enter_transaction_management()
|
[
"def leave_transaction_management(using=None):\n \"\"\"\n Leaves transaction management for a running thread. A dirty flag is carried\n over to the surrounding block, as a commit will commit all changes, even\n those from outside. (Commits are on connection level.)\n \"\"\"\n if using is None:\n for using in tldap.backend.connections:\n connection = tldap.backend.connections[using]\n connection.leave_transaction_management()\n return\n connection = tldap.backend.connections[using]\n connection.leave_transaction_management()",
"def leave_transaction_management(self) -> None:\n \"\"\"\n End a transaction. Must not be dirty when doing so. ie. commit() or\n rollback() must be called if changes made. If dirty, changes will be\n discarded.\n \"\"\"\n if len(self._transactions) == 0:\n raise RuntimeError(\"leave_transaction_management called outside transaction\")\n elif len(self._transactions[-1]) > 0:\n raise RuntimeError(\"leave_transaction_management called with uncommited rollbacks\")\n else:\n self._transactions.pop()",
"def _transaction_func(entering, exiting, using):\n \"\"\"\n Takes 3 things, an entering function (what to do to start this block of\n transaction management), an exiting function (what to do to end it, on both\n success and failure, and using which can be: None, indiciating transaction\n should occur on all defined servers, or a callable, indicating that using\n is None and to return the function already wrapped.\n\n Returns either a Transaction objects, which is both a decorator and a\n context manager, or a wrapped function, if using is a callable.\n \"\"\"\n # Note that although the first argument is *called* `using`, it\n # may actually be a function; @autocommit and @autocommit('foo')\n # are both allowed forms.\n if callable(using):\n return Transaction(entering, exiting, None)(using)\n return Transaction(entering, exiting, using)",
"def start_transaction(self, read_concern=None, write_concern=None,\n read_preference=None):\n \"\"\"Start a multi-statement transaction.\n\n Takes the same arguments as\n :class:`~pymongo.client_session.TransactionOptions`.\n\n Best used in a context manager block:\n\n .. code-block:: python3\n\n # Use \"await\" for start_session, but not for start_transaction.\n async with await client.start_session() as s:\n async with s.start_transaction():\n await collection.delete_one({'x': 1}, session=s)\n await collection.insert_one({'x': 2}, session=s)\n\n \"\"\"\n self.delegate.start_transaction(read_concern=read_concern,\n write_concern=write_concern,\n read_preference=read_preference)\n return _MotorTransactionContext(self)",
"def transaction(self,implicit = False):\n \"\"\"\n This returns a context guard which will automatically open and close a transaction\n \"\"\"\n\n class TransactionManager(object):\n\n def __init__(self,backend,implicit = False):\n self.backend = backend\n self.implicit = implicit\n\n def __enter__(self):\n self.within_transaction = True if self.backend.current_transaction else False\n self.transaction = self.backend.begin()\n\n def __exit__(self,exc_type,exc_value,traceback_obj):\n if exc_type:\n self.backend.rollback(self.transaction)\n return False\n else:\n #if the transaction has been created implicitly and we are not within\n #another transaction, we leave it open (the user needs to call commit manually)\n #if self.implicit and not self.within_transaction:\n # return\n self.backend.commit(self.transaction)\n\n return TransactionManager(self,implicit = implicit)",
"def execute_transaction(t, header: nil, block_info: nil, ignore_exception: false)\n unless state.find_account(t.sender).balance >= t.gas_price * t.gas_limit + t.value\n raise InvalidTransaction.new('account balance not enough')\n end\n\n # remove gas fee from account balance\n state.add_balance(t.sender, -1 * t.gas_limit * t.gas_price)\n\n intrinsic_gas = fork_schema.intrinsic_gas_of_transaction(t)\n if intrinsic_gas > t.gas_limit\n raise InvalidTransaction.new('intrinsic gas overflowed gas limit')\n end\n\n gas_limit = t.gas_limit - intrinsic_gas\n\n instruction = Instruction.new(\n origin: t.sender,\n price: t.gas_price,\n sender: t.sender,\n value: t.value,\n header: header,\n )\n\n if t.contract_creation?\n instruction.bytes_code = t.data\n else\n instruction.bytes_code = get_account_code(t.to)\n instruction.address = t.to\n instruction.data = t.data\n end\n\n block_info ||= header && BlockInfo.from_header(header)\n context = Ciri::EVM::ExecutionContext.new(\n instruction: instruction, gas_limit: gas_limit,\n block_info: block_info, fork_schema: fork_schema\n )\n vm = Ciri::EVM::VM.new(state: state, chain: @chain, burn_gas_on_exception: true)\n\n unless instruction.value > state.find_account(instruction.sender).balance\n state.increment_nonce(instruction.sender)\n end\n\n vm.with_context(context) do\n if t.contract_creation?\n # contract creation\n vm.create_contract(context: context)\n else\n vm.call_message(context: context)\n end\n raise context.exception if !ignore_exception && context.exception\n\n # refund gas\n sub_state_refund_gas = fork_schema.calculate_refund_gas(vm)\n context.refund_gas(sub_state_refund_gas)\n refund_gas = context.reset_refund_gas\n remain_gas = context.remain_gas\n actually_gas_used = t.gas_limit - remain_gas\n actually_refund_gas = [refund_gas, actually_gas_used / 2].min\n refund_gas_amount = (actually_refund_gas + remain_gas) * t.gas_price\n debug(\"Transaction refund #{refund_gas_amount} to #{t.sender.to_s.hex}\")\n state.add_balance(t.sender, refund_gas_amount)\n\n # gas_used after refund gas\n gas_used = actually_gas_used - actually_refund_gas\n\n # miner fee\n fee = gas_used * t.gas_price\n debug(\"Transaction fee #{fee}\")\n miner_account = find_account(block_info.coinbase)\n miner_account.balance += fee\n state.set_balance(block_info.coinbase, miner_account.balance)\n\n # EIP158 fork, we need to delete miner account if account become empty\n vm.sub_state.add_touched_account(block_info.coinbase)\n vm.delete_empty_accounts\n\n # destroy accounts\n vm.execution_context.all_suicide_accounts.each do |address|\n state.set_balance(address, 0)\n state.delete_account(address)\n end\n\n ExecutionResult.new(status: context.status, state_root: state_root, logs: context.all_log_series,\n gas_used: gas_used, gas_price: t.gas_price, exception: context.exception,\n output: context.output)\n end\n end",
"void entered(Transaction txn, Transaction parent) {\r\n TransactionMonitor monitor = mMonitor;\r\n if (monitor != null) {\r\n monitor.entered(txn, parent);\r\n }\r\n }",
"def txn(self, overwrite=False, lock=True):\n \"\"\"Context manager for a state modification transaction.\"\"\"\n if lock:\n self._lock.acquire()\n try:\n new_state, existing_generation = self.state_and_generation\n new_state = copy.deepcopy(new_state)\n yield new_state\n if overwrite:\n existing_generation = None\n self.set_state(new_state, existing_generation=existing_generation)\n finally:\n if lock:\n self._lock.release()",
"def transaction(self):\n \"\"\"\n Sets up a context where all the statements within it are ran within\n a single database transaction. For internal use only.\n \"\"\"\n # The idea here is to fake the nesting of transactions. Only when\n # we've gotten back to the topmost transaction context do we actually\n # commit or rollback.\n with self.mdr:\n try:\n self._depth += 1\n yield self\n self._depth -= 1\n except self.mdr.OperationalError:\n # We've lost the connection, so there's no sense in\n # attempting to roll back back the transaction.\n self._depth -= 1\n raise\n except:\n self._depth -= 1\n if self._depth == 0:\n self.mdr.rollback()\n raise\n if self._depth == 0:\n self.mdr.commit()",
"def transaction(&block)\n\t\t\tThread.current[:txids] = []\n\t\t\tODBA.storage.transaction(&block)\n\t\trescue Exception => excp\n\t\t\ttransaction_rollback\n\t\t\traise excp\n\t\tensure\n\t\t\tThread.current[:txids] = nil\n\t\tend",
"def fresh_transaction(self, name=None):\n \"\"\"On entrance to this context manager, hold an exclusive lock and\n create a fresh transaction for redshift, then commit and begin a new\n one before releasing the lock on exit.\n\n See drop_relation in RedshiftAdapter for more information.\n\n :param Optional[str] name: The name of the connection to use, or None\n to use the default.\n \"\"\"\n with drop_lock:\n\n connection = self.get_thread_connection()\n\n if connection.transaction_open:\n self.commit()\n\n self.begin()\n yield\n\n self.commit()\n self.begin()",
"def begin_transaction(self, transaction_type, trace_parent=None):\n \"\"\"\n Start a new transactions and bind it in a thread-local variable\n\n :returns the Transaction object\n \"\"\"\n if trace_parent:\n is_sampled = bool(trace_parent.trace_options.recorded)\n else:\n is_sampled = self._sample_rate == 1.0 or self._sample_rate > random.random()\n transaction = Transaction(self, transaction_type, trace_parent=trace_parent, is_sampled=is_sampled)\n if trace_parent is None:\n transaction.trace_parent = TraceParent(\n constants.TRACE_CONTEXT_VERSION,\n \"%032x\" % random.getrandbits(128),\n transaction.id,\n TracingOptions(recorded=is_sampled),\n )\n execution_context.set_transaction(transaction)\n return transaction"
] |
[
0.7948814630508423,
0.7469999194145203,
0.6925532817840576,
0.690844714641571,
0.6850407719612122,
0.6764214038848877,
0.6729089021682739,
0.6670367121696472,
0.6654178500175476,
0.6579058170318604,
0.6569712162017822,
0.6567016839981079
] |
Leaves transaction management for a running thread. A dirty flag is carried
over to the surrounding block, as a commit will commit all changes, even
those from outside. (Commits are on connection level.)
|
def leave_transaction_management(using=None):
"""
Leaves transaction management for a running thread. A dirty flag is carried
over to the surrounding block, as a commit will commit all changes, even
those from outside. (Commits are on connection level.)
"""
if using is None:
for using in tldap.backend.connections:
connection = tldap.backend.connections[using]
connection.leave_transaction_management()
return
connection = tldap.backend.connections[using]
connection.leave_transaction_management()
|
[
"def leave_transaction_management(self) -> None:\n \"\"\"\n End a transaction. Must not be dirty when doing so. ie. commit() or\n rollback() must be called if changes made. If dirty, changes will be\n discarded.\n \"\"\"\n if len(self._transactions) == 0:\n raise RuntimeError(\"leave_transaction_management called outside transaction\")\n elif len(self._transactions[-1]) > 0:\n raise RuntimeError(\"leave_transaction_management called with uncommited rollbacks\")\n else:\n self._transactions.pop()",
"def enter_transaction_management(using=None):\n \"\"\"\n Enters transaction management for a running thread. It must be balanced\n with the appropriate leave_transaction_management call, since the actual\n state is managed as a stack.\n\n The state and dirty flag are carried over from the surrounding block or\n from the settings, if there is no surrounding block (dirty is always false\n when no current block is running).\n \"\"\"\n if using is None:\n for using in tldap.backend.connections:\n connection = tldap.backend.connections[using]\n connection.enter_transaction_management()\n return\n connection = tldap.backend.connections[using]\n connection.enter_transaction_management()",
"private void leaveTransaction(EntityManager em, Object transaction) {\n if( isJTA ) { \n try { \n if( transaction != null ) { \n // There's a tx running, close it.\n ((UserTransaction) transaction).commit();\n }\n } catch(Exception e) { \n logger.error(\"Unable to commit transaction: \", e);\n }\n } else { \n if( transaction != null ) { \n ((EntityTransaction) transaction).commit();\n }\n }\n \n\n if (!sharedEM) {\n try { \n em.flush();\n em.close(); \n } catch( Exception e ) { \n logger.error(\"Unable to close created EntityManager: {}\", e.getMessage(), e);\n }\n }\n }",
"void save(final ItemStateChangesLog changesLog, final TransactionableResourceManager txResourceManager)\n throws RepositoryException\n {\n if (isSuspended.get())\n {\n try\n {\n latcher.get().await();\n }\n catch (InterruptedException e)\n {\n throw new RepositoryException(e);\n }\n }\n\n workingThreads.incrementAndGet();\n try\n {\n SecurityHelper.doPrivilegedExceptionAction(new PrivilegedExceptionAction<Void>()\n {\n public Void run() throws Exception\n {\n doSave(changesLog, txResourceManager);\n return null;\n }\n });\n }\n catch (PrivilegedActionException e)\n {\n Throwable cause = e.getCause();\n if (cause instanceof RepositoryException)\n {\n throw (RepositoryException)cause;\n }\n else if (cause instanceof RuntimeException)\n {\n throw (RuntimeException)cause;\n }\n else\n {\n throw new RuntimeException(cause);\n }\n }\n finally\n {\n workingThreads.decrementAndGet();\n\n if (isSuspended.get() && workingThreads.get() == 0)\n {\n synchronized (workingThreads)\n {\n workingThreads.notifyAll();\n }\n }\n }\n }",
"@Override\n public void commit() throws RollbackException, HeuristicMixedException,\n HeuristicRollbackException, SecurityException,\n IllegalStateException, SystemException {\n DummyTransaction tx = getTransaction();\n if (tx == null)\n throw new IllegalStateException(\"thread not associated with transaction\");\n tx.commit();\n\n // Disassociate tx from thread.\n setTransaction(null);\n }",
"@Override\n protected void disconnectTransactions(StoredBlock oldBlock) throws PrunedException, BlockStoreException {\n checkState(lock.isHeldByCurrentThread());\n blockStore.beginDatabaseBatchWrite();\n try {\n StoredUndoableBlock undoBlock = blockStore.getUndoBlock(oldBlock.getHeader().getHash());\n if (undoBlock == null) throw new PrunedException(oldBlock.getHeader().getHash());\n TransactionOutputChanges txOutChanges = undoBlock.getTxOutChanges();\n for (UTXO out : txOutChanges.txOutsSpent)\n blockStore.addUnspentTransactionOutput(out);\n for (UTXO out : txOutChanges.txOutsCreated)\n blockStore.removeUnspentTransactionOutput(out);\n } catch (PrunedException e) {\n blockStore.abortDatabaseBatchWrite();\n throw e;\n } catch (BlockStoreException e) {\n blockStore.abortDatabaseBatchWrite();\n throw e;\n }\n }",
"public void localCommit()\r\n {\r\n if (log.isDebugEnabled()) log.debug(\"commit was called\");\r\n if (!this.isInLocalTransaction)\r\n {\r\n throw new TransactionNotInProgressException(\"Not in transaction, call begin() before commit()\");\r\n }\r\n try\r\n {\r\n if(!broker.isManaged())\r\n {\r\n if (batchCon != null)\r\n {\r\n batchCon.commit();\r\n }\r\n else if (con != null)\r\n {\r\n con.commit();\r\n }\r\n }\r\n else\r\n {\r\n if(log.isDebugEnabled()) log.debug(\r\n \"Found managed environment setting in PB, will skip Connection.commit() call\");\r\n }\r\n }\r\n catch (SQLException e)\r\n {\r\n log.error(\"Commit on underlying connection failed, try to rollback connection\", e);\r\n this.localRollback();\r\n throw new TransactionAbortedException(\"Commit on connection failed\", e);\r\n }\r\n finally\r\n {\r\n this.isInLocalTransaction = false;\r\n restoreAutoCommitState();\r\n this.releaseConnection();\r\n }\r\n }",
"public void beforeCompletion()\r\n {\r\n // avoid redundant calls\r\n if(beforeCompletionCall) return;\r\n\r\n log.info(\"Method beforeCompletion was called\");\r\n int status = Status.STATUS_UNKNOWN;\r\n try\r\n {\r\n JTATxManager mgr = (JTATxManager) getImplementation().getTxManager();\r\n status = mgr.getJTATransaction().getStatus();\r\n // ensure proper work, check all possible status\r\n // normally only check for 'STATUS_MARKED_ROLLBACK' is necessary\r\n if(status == Status.STATUS_MARKED_ROLLBACK\r\n || status == Status.STATUS_ROLLEDBACK\r\n || status == Status.STATUS_ROLLING_BACK\r\n || status == Status.STATUS_UNKNOWN\r\n || status == Status.STATUS_NO_TRANSACTION)\r\n {\r\n log.error(\"Synchronization#beforeCompletion: Can't prepare for commit, because tx status was \"\r\n + TxUtil.getStatusString(status) + \". Do internal cleanup only.\");\r\n }\r\n else\r\n {\r\n if(log.isDebugEnabled())\r\n {\r\n log.debug(\"Synchronization#beforeCompletion: Prepare for commit\");\r\n }\r\n // write objects to database\r\n prepareCommit();\r\n }\r\n }\r\n catch(Exception e)\r\n {\r\n log.error(\"Synchronization#beforeCompletion: Error while prepare for commit\", e);\r\n if(e instanceof LockNotGrantedException)\r\n {\r\n throw (LockNotGrantedException) e;\r\n }\r\n else if(e instanceof TransactionAbortedException)\r\n {\r\n throw (TransactionAbortedException) e;\r\n }\r\n else if(e instanceof ODMGRuntimeException)\r\n {\r\n throw (ODMGRuntimeException) e;\r\n }\r\n else\r\n { \r\n throw new ODMGRuntimeException(\"Method beforeCompletion() fails, status of JTA-tx was \"\r\n + TxUtil.getStatusString(status) + \", message: \" + e.getMessage());\r\n }\r\n\r\n }\r\n finally\r\n {\r\n beforeCompletionCall = true;\r\n setInExternTransaction(false);\r\n internalCleanup();\r\n }\r\n }",
"public void onExit(Propagation propagation, String interceptionId,\n TransactionCallback callback) throws HeuristicRollbackException, HeuristicMixedException, SystemException,\n InvalidTransactionException {\n Transaction current = getActiveTransaction();\n if (callback == null) {\n callback = new TransactionCallback() {\n @Override\n public void transactionCommitted(Transaction transaction) {\n // Do nothing\n }\n\n @Override\n public void transactionRolledBack(Transaction transaction) {\n // Do nothing\n }\n };\n }\n\n switch (propagation) {\n case REQUIRES:\n // Are we the owner of the transaction?\n if (owned.contains(current)) { // Owner.\n try {\n current.commit(); // Commit the transaction\n owned.remove(current);\n callback.transactionCommitted(current);\n } catch (RollbackException e) {\n owned.remove(current);\n e.printStackTrace();\n callback.transactionRolledBack(current);\n }\n } // Else wait for commit.\n break;\n case MANDATORY:\n // We are never the owner, so just exits the transaction.\n break;\n case SUPPORTED:\n // Do nothing.\n break;\n case NOT_SUPPORTED:\n // We may have suspended a transaction if one, resume it\n // If we have another transaction and we have suspended a transaction,\n // throw an IllegalStateException because it's impossible to resume\n // the suspended transaction. If we didn't suspend a transaction, accept the new transaction (user\n // responsibility)\n List<Transaction> susp = suspended.get(Thread.currentThread());\n if (current != null && !susp.isEmpty()) {\n throw new IllegalStateException(\"Error while handling \" + interceptionId + \" : you cannot start a\" +\n \" transaction after having suspended one. We would not be able to resume the suspended \" +\n \"transaction\");\n } else if (current == null && !susp.isEmpty()) {\n manager.resume(susp.remove(susp.size() - 1));\n }\n break;\n case NEVER:\n // Do nothing.\n break;\n case REQUIRES_NEW:\n // We're necessary the owner.\n try {\n current.commit(); // Commit the transaction\n owned.remove(current);\n callback.transactionCommitted(current);\n List<Transaction> suspendedTransactions = suspended.get(Thread.currentThread());\n if (suspendedTransactions != null && !suspendedTransactions.isEmpty()) {\n // suspend the completed transaction.\n Transaction trans = suspendedTransactions.get(suspendedTransactions.size() - 1);\n manager.suspend();\n suspendedTransactions.remove(trans);\n manager.resume(trans);\n }\n } catch (RollbackException e) { // The transaction was rolledback rather than committed\n owned.remove(current);\n callback.transactionRolledBack(current);\n\n List<Transaction> suspendedTransactions = suspended.get(Thread.currentThread());\n if (suspendedTransactions != null && !suspendedTransactions.isEmpty()) {\n // suspend the transaction.\n Transaction trans = suspendedTransactions.get(suspendedTransactions.size() - 1);\n manager.suspend();\n suspendedTransactions.remove(trans);\n manager.resume(trans);\n }\n }\n break;\n default:\n throw new UnsupportedOperationException(\"Unknown or unsupported propagation policy for \" + interceptionId + \" :\" +\n propagation);\n\n }\n }",
"@Override\n public void commit() throws TTException {\n checkState(!mDelegate.isClosed(), \"Transaction already closed\");\n\n mDelegate.mSession.waitForRunningCommit();\n\n final UberBucket uber = clone(mNewUber);\n final MetaBucket meta = clone(mNewMeta);\n final RevisionRootBucket rev = clone(mNewRoot);\n // storing the reference to the former log.\n mFormerLog = mLog;\n // new log\n // mLog = new MemoryLog();\n mLog =\n new LRULog(new File(mDelegate.mSession.getConfig().mProperties\n .getProperty(org.treetank.access.conf.ConstructorProps.RESOURCEPATH)), mDelegate.mSession\n .getConfig().mDataFac, mDelegate.mSession.getConfig().mMetaFac);\n\n mDelegate.mSession.setRunningCommit(mCommitInProgress.submit(new CommitCallable(uber, rev, meta)));\n // Comment here to enabled blocked behaviour\n // mDelegate.mSession.waitForRunningCommit();\n\n setUpTransaction(uber, rev, meta, mDelegate.mSession, uber.getRevisionNumber());\n\n }",
"private void internalCleanup()\r\n {\r\n if(hasBroker())\r\n {\r\n PersistenceBroker broker = getBroker();\r\n if(log.isDebugEnabled())\r\n {\r\n log.debug(\"Do internal cleanup and close the internal used connection without\" +\r\n \" closing the used broker\");\r\n }\r\n ConnectionManagerIF cm = broker.serviceConnectionManager();\r\n if(cm.isInLocalTransaction())\r\n {\r\n /*\r\n arminw:\r\n in managed environment this call will be ignored because, the JTA transaction\r\n manager control the connection status. But to make connectionManager happy we\r\n have to complete the \"local tx\" of the connectionManager before release the\r\n connection\r\n */\r\n cm.localCommit();\r\n }\r\n cm.releaseConnection();\r\n }\r\n }",
"void exited(Transaction txn, Transaction active) {\r\n TransactionMonitor monitor = mMonitor;\r\n if (monitor != null) {\r\n monitor.exited(txn, active);\r\n }\r\n }"
] |
[
0.7795088887214661,
0.7766814827919006,
0.7272088527679443,
0.6986686587333679,
0.6957940459251404,
0.6925381422042847,
0.6876813769340515,
0.686309278011322,
0.6844186186790466,
0.6785255670547485,
0.6785023212432861,
0.6779400110244751
] |
Returns True if the current transaction requires a commit for changes to
happen.
|
def is_dirty(using=None):
"""
Returns True if the current transaction requires a commit for changes to
happen.
"""
if using is None:
dirty = False
for using in tldap.backend.connections:
connection = tldap.backend.connections[using]
if connection.is_dirty():
dirty = True
return dirty
connection = tldap.backend.connections[using]
return connection.is_dirty()
|
[
"def has_commit(self):\n \"\"\"\n :return:\n :rtype: boolean\n \"\"\"\n current_revision = self.history.current_revision\n revision_id = self.state.revision_id\n\n return current_revision.revision_id != revision_id",
"def has_commit(self, client_key=None):\n \"\"\"\n Return True if client has new commit.\n\n :param client_key: The client key\n :type client_key: str\n :return:\n :rtype: boolean\n \"\"\"\n if client_key is None and self.current_client is None:\n raise ClientNotExist()\n\n if client_key:\n if not self.clients.has_client(client_key):\n raise ClientNotExist()\n\n client = self.clients.get_client(client_key)\n\n return client.has_commit()\n\n if self.current_client:\n client = self.current_client\n\n return client.has_commit()\n\n return False",
"def _maybe_commit(self, transaction):\n \"\"\"Try to commit the transaction.\n\n If the transaction is read-write and the ``Commit`` fails with the\n ``ABORTED`` status code, it will be retried. Any other failure will\n not be caught.\n\n Args:\n transaction (~.firestore_v1beta1.transaction.Transaction): The\n transaction to be ``Commit``-ed.\n\n Returns:\n bool: Indicating if the commit succeeded.\n \"\"\"\n try:\n transaction._commit()\n return True\n except exceptions.GoogleAPICallError as exc:\n if transaction._read_only:\n raise\n\n if isinstance(exc, exceptions.Aborted):\n # If a read-write transaction returns ABORTED, retry.\n return False\n else:\n raise",
"def get_commit_req(self):\n \"\"\"Lazy commit request getter.\"\"\"\n if not self.commit_req:\n self.commit_req = datastore.CommitRequest()\n self.commit_req.transaction = self.tx\n return self.commit_req",
"def is_dirty(self) -> bool:\n \"\"\" Are there uncommitted changes? \"\"\"\n if len(self._transactions) == 0:\n raise RuntimeError(\"is_dirty called outside a transaction.\")\n if len(self._transactions[-1]) > 0:\n return True\n return False",
"def commit(self):\n \"\"\"git commit and return whether there were changes\"\"\"\n self.git.add('-A', '.')\n try:\n self.git.commit('-m', self.commit_msg)\n return True\n except sh.ErrorReturnCode_1:\n return False",
"def have_active_commit(self):\n \"\"\" Checks if there is an active commit owned by the specified user \"\"\"\n\n commit_state = sfs.file_or_default(sfs.cpjoin(self.base_path, 'active_commit'), None)\n if commit_state != None: return True\n return False",
"def _has_commit(version, debug=False):\n \"\"\"\n Determine a version is a local git commit sha or not.\n\n :param version: A string containing the branch/tag/sha to be determined.\n :param debug: An optional bool to toggle debug output.\n :return: bool\n \"\"\"\n if _has_tag(version, debug) or _has_branch(version, debug):\n return False\n cmd = sh.git.bake('cat-file', '-e', version)\n try:\n util.run_command(cmd, debug=debug)\n return True\n except sh.ErrorReturnCode:\n return False",
"def _auto_commit(self):\n \"\"\"\n Check if we have to commit based on number of messages and commit\n \"\"\"\n\n # Check if we are supposed to do an auto-commit\n if not self.auto_commit or self.auto_commit_every_n is None:\n return\n\n if self.count_since_commit >= self.auto_commit_every_n:\n self.commit()",
"protected boolean shouldCommitDuringPrepare(PrepareCommand command, TxInvocationContext ctx) {\n return totalOrder ?\n command.isOnePhaseCommit() && (!ctx.isOriginLocal() || !command.hasModifications()) :\n command.isOnePhaseCommit();\n }",
"def commit(self, sql=None):\n \"\"\"Commit the current transaction.\"\"\"\n self._transaction = False\n try:\n commit = self._con.commit\n except AttributeError:\n return self._con.query(sql or 'commit')\n else:\n if sql:\n return commit(sql=sql)\n else:\n return commit()",
"def is_changed():\n \"\"\" Checks if current project has any noncommited changes. \"\"\"\n executed, changed_lines = execute_git('status --porcelain', output=False)\n merge_not_finished = mod_path.exists('.git/MERGE_HEAD')\n return changed_lines.strip() or merge_not_finished"
] |
[
0.7338279485702515,
0.7293597459793091,
0.725247859954834,
0.7228243350982666,
0.715399980545044,
0.7153799533843994,
0.7116058468818665,
0.7112911343574524,
0.6996734738349915,
0.6891674995422363,
0.6886998414993286,
0.6882462501525879
] |
Checks whether the transaction manager is in manual or in auto state.
|
def is_managed(using=None):
"""
Checks whether the transaction manager is in manual or in auto state.
"""
if using is None:
managed = False
for using in tldap.backend.connections:
connection = tldap.backend.connections[using]
if connection.is_managed():
managed = True
return managed
connection = tldap.backend.connections[using]
return connection.is_managed()
|
[
"public void setTransactionManualMode(boolean manualMode) {\n this.transactionHandler.setManualMode(manualMode);\n overrider.override(MjdbcConstants.OVERRIDE_INT_IS_MANUAL_MODE, manualMode);\n }",
"public boolean isManualQuery()\n {\n boolean bIsManual = false;\n if (this.getTable().getCurrentTable() instanceof QueryTable) // Only if I am faking a query\n bIsManual = true;\n if (this.getDatabaseType() == DBConstants.MANUAL_QUERY)\n bIsManual = true;\n return bIsManual;\n }",
"boolean isManaged() {\n if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())\n SibTr.entry(this, tc, \"isManaged\");\n if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())\n SibTr.exit(this, tc, \"isManaged\", isManaged);\n return isManaged;\n }",
"public boolean isBmtActive(EJBMethodInfoImpl methodInfo)\n {\n TranStrategy ts = txStrategies[methodInfo.getTransactionAttribute().getValue()];\n return ts.isBmtActive();\n }",
"protected boolean hasOngoingTransaction() {\n\t\tif(!isSupervisedMode()) {\n\t\t\treturn false;\n\t\t} else {\n\t\t\tif(ongoingTransactions != null) {\n\t\t\t\tfor (Transaction transaction : ongoingTransactions) {\n\t\t\t\t\tif(TransactionState.CALLING.equals(transaction.getState()) ||\n\t\t\t\t\t\tTransactionState.TRYING.equals(transaction.getState()) ||\n\t\t\t\t\t\tTransactionState.PROCEEDING.equals(transaction.getState()) ||\n\t\t\t\t\t\tTransactionState.COMPLETED.equals(transaction.getState()) ||\n\t\t\t\t\t\tTransactionState.CONFIRMED.equals(transaction.getState())) {\n\t\t\t\t\t\t\treturn true;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn false;\n\t\t}\n\t}",
"public final boolean isTransactional() {\n // Take a snapshot of the value with first use (or reuse from pool) of the managed connection.\n // This value will be cleared when the managed connection is returned to the pool.\n if (transactional == null) {\n transactional = mcf.dsConfig.get().transactional;\n if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())\n Tr.debug(this, tc, \"transactional=\", transactional);\n }\n\n return transactional;\n }",
"public static boolean isTMActive()\n throws EFapsException\n {\n try {\n return Context.TRANSMANAG.getStatus() == Status.STATUS_ACTIVE;\n } catch (final SystemException e) {\n throw new EFapsException(Context.class, \"isTMActive.SystemException\", e);\n }\n }",
"def triggered(self, manual=False):\n \"\"\"Check if this streamer should generate a report.\n\n Streamers can be triggered automatically whenever they have data\n or they can be triggered manually. This method returns True if the\n streamer is currented triggered.\n\n A streamer is triggered if it:\n - (has data AND is automatic) OR\n - (has data AND is manually triggered)\n\n Args:\n manual (bool): Indicate that the streamer has been manually triggered.\n\n Returns:\n bool: Whether the streamer can generate a report right now.\n \"\"\"\n\n if self.walker is None:\n raise InternalError(\"You can only check if a streamer is triggered if you create it with a SensorLog\")\n\n if not self.automatic and not manual:\n return False\n\n return self.has_data()",
"public final boolean isGlobalTransactionActive() {\n UOWCurrent uow = (UOWCurrent) mcf.connectorSvc.getTransactionManager();\n UOWCoordinator coord = uow == null ? null : uow.getUOWCoord();\n return coord != null && coord.isGlobal();\n }",
"def enter_transaction_management(using=None):\n \"\"\"\n Enters transaction management for a running thread. It must be balanced\n with the appropriate leave_transaction_management call, since the actual\n state is managed as a stack.\n\n The state and dirty flag are carried over from the surrounding block or\n from the settings, if there is no surrounding block (dirty is always false\n when no current block is running).\n \"\"\"\n if using is None:\n for using in tldap.backend.connections:\n connection = tldap.backend.connections[using]\n connection.enter_transaction_management()\n return\n connection = tldap.backend.connections[using]\n connection.enter_transaction_management()",
"public boolean isGlobalTxActive()\n {\n TransactionContext ctx;\n try\n {\n // We need to check the status also to be able to manage properly suspend and resume\n return (ctx = contexts.get()) != null && ctx.getXidContext() != null && tm.getStatus() != Status.STATUS_NO_TRANSACTION;\n }\n catch (SystemException e)\n {\n log.warn(\"Could not check if a global Tx has been started\", e);\n }\n return false;\n }",
"public SIMPTransactionManager getTXManager() {\n if (TraceComponent.isAnyTracingEnabled()\n && tc.isEntryEnabled())\n SibTr.entry(tc, \"getTXManager\");\n\n if (TraceComponent.isAnyTracingEnabled()\n && tc.isEntryEnabled())\n SibTr.exit(tc, \"getTXManager\", _txManager);\n\n return _txManager;\n }"
] |
[
0.7647520303726196,
0.7216840982437134,
0.7114665508270264,
0.70999675989151,
0.7045300006866455,
0.6824000477790833,
0.6787397861480713,
0.678667426109314,
0.6777503490447998,
0.6752684116363525,
0.6746699213981628,
0.6737667322158813
] |
Does the commit itself and resets the dirty flag.
|
def commit(using=None):
"""
Does the commit itself and resets the dirty flag.
"""
if using is None:
for using in tldap.backend.connections:
connection = tldap.backend.connections[using]
connection.commit()
return
connection = tldap.backend.connections[using]
connection.commit()
|
[
"public function commit()\n\t{\n\t\tif ( $this->_initialized && $this->_dirty )\n\t\t{\n\t\t\t$this->_saveCache();\n\t\t\t$this->_dirty = false;\n\t\t}\n\t}",
"def commit(self):\n \"\"\"Commit dirty records to the server. This method is automatically\n called when the `auto_commit` option is set to `True` (default).\n It can be useful to set the former option to `False` to get better\n performance by reducing the number of RPC requests generated.\n\n With `auto_commit` set to `True` (default behaviour), each time a value\n is set on a record field a RPC request is sent to the server to update\n the record:\n\n .. doctest::\n\n >>> user = odoo.env.user\n >>> user.name = \"Joe\" # write({'name': \"Joe\"})\n >>> user.email = \"joe@odoo.net\" # write({'email': \"joe@odoo.net\"})\n\n With `auto_commit` set to `False`, changes on a record are sent all at\n once when calling the :func:`commit` method:\n\n .. doctest::\n\n >>> odoo.config['auto_commit'] = False\n >>> user = odoo.env.user\n >>> user.name = \"Joe\"\n >>> user.email = \"joe@odoo.net\"\n >>> user in odoo.env.dirty\n True\n >>> odoo.env.commit() # write({'name': \"Joe\", 'email': \"joe@odoo.net\"})\n >>> user in odoo.env.dirty\n False\n\n Only one RPC request is generated in the last case.\n \"\"\"\n # Iterate on a new set, as we remove record during iteration from the\n # original one\n for record in set(self.dirty):\n values = {}\n for field in record._values_to_write:\n if record.id in record._values_to_write[field]:\n value = record._values_to_write[field].pop(record.id)\n values[field] = value\n # Store the value in the '_values' dictionary. This\n # operation is delegated to each field descriptor as some\n # values can not be stored \"as is\" (e.g. magic tuples of\n # 2many fields need to be converted)\n record.__class__.__dict__[field].store(record, value)\n record.write(values)\n self.dirty.remove(record)",
"public void commit() {\n if (directory == null)\n return;\n\n try {\n if (reader != null)\n reader.close();\n\n // it turns out that IndexWriter.optimize actually slows\n // searches down, because it invalidates the cache. therefore\n // not calling it any more.\n // http://www.searchworkings.org/blog/-/blogs/uwe-says%3A-is-your-reader-atomic\n // iwriter.optimize();\n\n iwriter.commit();\n openSearchers();\n } catch (IOException e) {\n throw new DukeException(e);\n }\n }",
"@Override\n public void setAutoCommit(boolean autoCommit) throws SQLException\n {\n delegate.setAutoCommit(autoCommit);\n isAutoCommit = autoCommit;\n dirtyBits |= DIRTY_BIT_AUTOCOMMIT;\n }",
"private function commitAll()\n {\n while ($this->transactionNestingLevel !== 0) {\n if ($this->autoCommit === false && $this->transactionNestingLevel === 1) {\n // When in no auto-commit mode, the last nesting commit immediately starts a new transaction.\n // Therefore we need to do the final commit here and then leave to avoid an infinite loop.\n $this->commit();\n\n return;\n }\n\n $this->commit();\n }\n }",
"def commit(self) -> None:\n \"\"\"\n Attempt to commit all changes to LDAP database. i.e. forget all\n rollbacks. However stay inside transaction management.\n \"\"\"\n if len(self._transactions) == 0:\n raise RuntimeError(\"commit called outside transaction\")\n\n # If we have nested transactions, we don't actually commit, but push\n # rollbacks up to previous transaction.\n if len(self._transactions) > 1:\n for on_rollback in reversed(self._transactions[-1]):\n self._transactions[-2].insert(0, on_rollback)\n\n _debug(\"commit\")\n self.reset()",
"public final void commit() throws IllegalStateException, RepositoryException\n {\n checkIfOpened();\n try\n {\n closeStatements();\n\n if (!readOnly)\n {\n try\n {\n for (ValueIOChannel vo : valueChanges)\n {\n vo.twoPhaseCommit();\n }\n }\n catch (IOException e)\n {\n throw new RepositoryException(e);\n }\n finally\n {\n valueChanges.clear();\n }\n if (getDbConnectionTotalUsed() == 1)\n {\n // We don't commit as long as it is used\n dbConnection.commit();\n }\n }\n }\n catch (SQLException e)\n {\n throw new RepositoryException(e);\n }\n finally\n {\n try\n {\n if (release() == 0)\n {\n // We don't close the connection as long as it is used\n dbConnection.close();\n }\n }\n catch (SQLException e)\n {\n if (LOG.isWarnEnabled())\n {\n LOG.warn(\"Could not close the connection\", e);\n }\n }\n }\n }",
"public void commit() throws RollbackException,\n HeuristicMixedException,\n HeuristicRollbackException,\n SecurityException,\n IllegalStateException,\n SystemException\n {\n if (status == Status.STATUS_UNKNOWN)\n throw new IllegalStateException(\"Status unknown\");\n\n if (status == Status.STATUS_MARKED_ROLLBACK)\n throw new IllegalStateException(\"Status marked rollback\");\n\n finish(true);\n }",
"public function commit()\n {\n // Raise preFlush\n if ($this->eventManager->hasListeners(Events::preFlush)) {\n $this->eventManager->dispatchEvent(Events::preFlush, new PreFlushEventArgs($this->em));\n }\n\n $this->computeChangeSets();\n\n if (! ($this->entityInsertions ||\n $this->entityDeletions ||\n $this->entityUpdates ||\n $this->collectionUpdates ||\n $this->collectionDeletions ||\n $this->orphanRemovals)) {\n $this->dispatchOnFlushEvent();\n $this->dispatchPostFlushEvent();\n\n $this->postCommitCleanup();\n\n return; // Nothing to do.\n }\n\n $this->assertThatThereAreNoUnintentionallyNonPersistedAssociations();\n\n if ($this->orphanRemovals) {\n foreach ($this->orphanRemovals as $orphan) {\n $this->remove($orphan);\n }\n }\n\n $this->dispatchOnFlushEvent();\n\n // Now we need a commit order to maintain referential integrity\n $commitOrder = $this->getCommitOrder();\n\n $conn = $this->em->getConnection();\n $conn->beginTransaction();\n\n try {\n // Collection deletions (deletions of complete collections)\n foreach ($this->collectionDeletions as $collectionToDelete) {\n $this->getCollectionPersister($collectionToDelete->getMapping())->delete($collectionToDelete);\n }\n\n if ($this->entityInsertions) {\n foreach ($commitOrder as $class) {\n $this->executeInserts($class);\n }\n }\n\n if ($this->entityUpdates) {\n foreach ($commitOrder as $class) {\n $this->executeUpdates($class);\n }\n }\n\n // Extra updates that were requested by persisters.\n if ($this->extraUpdates) {\n $this->executeExtraUpdates();\n }\n\n // Collection updates (deleteRows, updateRows, insertRows)\n foreach ($this->collectionUpdates as $collectionToUpdate) {\n $this->getCollectionPersister($collectionToUpdate->getMapping())->update($collectionToUpdate);\n }\n\n // Entity deletions come last and need to be in reverse commit order\n if ($this->entityDeletions) {\n foreach (array_reverse($commitOrder) as $committedEntityName) {\n if (! $this->entityDeletions) {\n break; // just a performance optimisation\n }\n\n $this->executeDeletions($committedEntityName);\n }\n }\n\n $conn->commit();\n } catch (Throwable $e) {\n $this->em->close();\n $conn->rollBack();\n\n $this->afterTransactionRolledBack();\n\n throw $e;\n }\n\n $this->afterTransactionComplete();\n\n // Take new snapshots from visited collections\n foreach ($this->visitedCollections as $coll) {\n $coll->takeSnapshot();\n }\n\n $this->dispatchPostFlushEvent();\n\n $this->postCommitCleanup();\n }",
"public void localCommit()\r\n {\r\n if (log.isDebugEnabled()) log.debug(\"commit was called\");\r\n if (!this.isInLocalTransaction)\r\n {\r\n throw new TransactionNotInProgressException(\"Not in transaction, call begin() before commit()\");\r\n }\r\n try\r\n {\r\n if(!broker.isManaged())\r\n {\r\n if (batchCon != null)\r\n {\r\n batchCon.commit();\r\n }\r\n else if (con != null)\r\n {\r\n con.commit();\r\n }\r\n }\r\n else\r\n {\r\n if(log.isDebugEnabled()) log.debug(\r\n \"Found managed environment setting in PB, will skip Connection.commit() call\");\r\n }\r\n }\r\n catch (SQLException e)\r\n {\r\n log.error(\"Commit on underlying connection failed, try to rollback connection\", e);\r\n this.localRollback();\r\n throw new TransactionAbortedException(\"Commit on connection failed\", e);\r\n }\r\n finally\r\n {\r\n this.isInLocalTransaction = false;\r\n restoreAutoCommitState();\r\n this.releaseConnection();\r\n }\r\n }",
"void commit() {\n\t\tif (connection != null) {\n\t\t\ttry {\n\t\t\t\tconnection.commit();\n\t\t\t} catch (SQLException e) {\n\t\t\t\tthrow new UroborosqlSQLException(e);\n\t\t\t}\n\t\t}\n\t\tclearState();\n\t}",
"public final void commit() {\n if (indexQueue == null) {\n luceneIndex.commit();\n } else {\n indexQueue.submitSynchronous(new Runnable() {\n @Override\n public void run() {\n luceneIndex.commit();\n }\n });\n }\n }"
] |
[
0.7501399517059326,
0.743144690990448,
0.7354114055633545,
0.7335783243179321,
0.7292166352272034,
0.7251945734024048,
0.7217972874641418,
0.7213919162750244,
0.721125066280365,
0.7208200693130493,
0.71666419506073,
0.7162477374076843
] |
This function does the rollback itself and resets the dirty flag.
|
def rollback(using=None):
"""
This function does the rollback itself and resets the dirty flag.
"""
if using is None:
for using in tldap.backend.connections:
connection = tldap.backend.connections[using]
connection.rollback()
return
connection = tldap.backend.connections[using]
connection.rollback()
|
[
"def rollback(self) -> None:\n \"\"\"\n Roll back to previous database state. However stay inside transaction\n management.\n \"\"\"\n if len(self._transactions) == 0:\n raise RuntimeError(\"rollback called outside transaction\")\n\n _debug(\"rollback:\", self._transactions[-1])\n # if something goes wrong here, nothing we can do about it, leave\n # database as is.\n try:\n # for every rollback action ...\n for on_rollback in self._transactions[-1]:\n # execute it\n _debug(\"--> rolling back\", on_rollback)\n self._do_with_retry(on_rollback)\n except: # noqa: E722\n _debug(\"--> rollback failed\")\n exc_class, exc, tb = sys.exc_info()\n raise tldap.exceptions.RollbackError(\n \"FATAL Unrecoverable rollback error: %r\" % exc)\n finally:\n # reset everything to clean state\n _debug(\"--> rollback success\")\n self.reset()",
"def doRollback(self):\n '''\n Call each rollback step in LIFO order.\n '''\n while self.steps:\n callback, args, kwargs = self.steps.pop()\n callback(*args, **kwargs)",
"def rollback(**kwargs):\n '''\n Roll back the last committed configuration changes and commit\n\n id : 0\n The rollback ID value (0-49)\n\n dev_timeout : 30\n The NETCONF RPC timeout (in seconds)\n\n comment\n Provide a comment for the commit\n\n confirm\n Provide time in minutes for commit confirmation. If this option is\n specified, the commit will be rolled back in the specified amount of time\n unless the commit is confirmed.\n\n diffs_file\n Path to the file where the diff (difference in old configuration and the\n committed configuration) will be stored. Note that the file will be\n stored on the proxy minion. To push the files to the master use\n :py:func:`cp.push <salt.modules.cp.push>`.\n\n CLI Example:\n\n .. code-block:: bash\n\n salt 'device_name' junos.rollback 10\n '''\n id_ = kwargs.pop('id', 0)\n\n ret = {}\n conn = __proxy__['junos.conn']()\n\n op = dict()\n if '__pub_arg' in kwargs:\n if kwargs['__pub_arg']:\n if isinstance(kwargs['__pub_arg'][-1], dict):\n op.update(kwargs['__pub_arg'][-1])\n else:\n op.update(kwargs)\n\n try:\n ret['out'] = conn.cu.rollback(id_)\n except Exception as exception:\n ret['message'] = 'Rollback failed due to \"{0}\"'.format(exception)\n ret['out'] = False\n return ret\n\n if ret['out']:\n ret['message'] = 'Rollback successful'\n else:\n ret['message'] = 'Rollback failed'\n return ret\n\n if 'diffs_file' in op and op['diffs_file'] is not None:\n diff = conn.cu.diff()\n if diff is not None:\n with salt.utils.files.fopen(op['diffs_file'], 'w') as fp:\n fp.write(salt.utils.stringutils.to_str(diff))\n else:\n log.info(\n 'No diff between current configuration and \\\n rollbacked configuration, so no diff file created')\n\n try:\n commit_ok = conn.cu.commit_check()\n except Exception as exception:\n ret['message'] = 'Could not commit check due to \"{0}\"'.format(\n exception)\n ret['out'] = False\n return ret\n\n if commit_ok:\n try:\n conn.cu.commit(**op)\n ret['out'] = True\n except Exception as exception:\n ret['out'] = False\n ret['message'] = \\\n 'Rollback successful but commit failed with error \"{0}\"'\\\n .format(exception)\n return ret\n else:\n ret['message'] = 'Rollback succesfull but pre-commit check failed.'\n ret['out'] = False\n return ret",
"def rollback(self):\n \"\"\"Revert the previous modification to the refpkg.\n \"\"\"\n # This is slightly complicated because of Python's freakish\n # assignment semantics and because we don't store multiple\n # copies of the log.\n if self.contents['rollback'] is None:\n raise ValueError(\"No operation to roll back on refpkg\")\n future_msg = self.contents['log'][0]\n rolledback_log = self.contents['log'][1:]\n rollforward = copy.deepcopy(self.contents)\n rollforward.pop('rollback')\n self.contents = self.contents['rollback']\n self.contents['log'] = rolledback_log\n self.contents['rollforward'] = [future_msg, rollforward]\n self._sync_to_disk()",
"def rollback(self):\n \"\"\"\n Netmiko is being used to commit the rollback configuration because\n it takes a better care of results compared to pan-python.\n \"\"\"\n if self.changed:\n rollback_cmd = '<load><config><from>{0}</from></config></load>'.format(self.backup_file)\n self.device.op(cmd=rollback_cmd)\n time.sleep(5)\n\n if self.ssh_connection is False:\n self._open_ssh()\n try:\n self.ssh_device.commit()\n self.loaded = False\n self.changed = False\n self.merge_config = False\n except: # noqa\n ReplaceConfigException(\"Error while loading backup config\")",
"void rollback()\n\t{\n\t\tswitch (data_type)\n\t\t{\n\t\tcase Tango_DEV_BOOLEAN :\n\t\t\tbool_val = old_bool_val;\n\t\t\tbreak;\n\t\t\n\t\tcase Tango_DEV_SHORT :\n\t\t\tshort_val = old_short_val;\n\t\t\tbreak;\n\t\t\n\t\tcase Tango_DEV_LONG :\n\t\t\tlong_val = old_long_val;\n\t\t\tbreak;\n\t\t\n\t\tcase Tango_DEV_LONG64 :\n\t\t\tlong64_val = old_long64_val;\n\t\t\tbreak;\n\t\t\n\t\tcase Tango_DEV_DOUBLE :\n\t\t\tdouble_val = old_double_val;\n\t\t\tbreak;\n\t\t\n\t\tcase Tango_DEV_STRING :\n\t\t\tstr_val = old_str_val;\n\t\t\tbreak;\n\t\t}\n\t}",
"def rollback(self):\n \"\"\"Implementation of NAPALM method rollback.\"\"\"\n commands = []\n commands.append('configure replace flash:rollback-0')\n commands.append('write memory')\n self.device.run_commands(commands)",
"def cmd_rollback(context):\n \"\"\"\n Roll back by finding the most recent \"stable\" tagged version, and putting it again, so that\n it's the new \"current\" version.\n Args:\n context: a populated EFVersionContext object\n \"\"\"\n last_stable = get_versions(context, return_stable=True)\n if len(last_stable) != 1:\n fail(\"Didn't find a version marked stable for key: {} in env/service: {}/{}\".format(\n context.key, context.env, context.service_name))\n context.value = last_stable[0].value\n context.commit_hash = last_stable[0].commit_hash\n context.build_number = last_stable[0].build_number\n context.location = last_stable[0].location\n context.stable = True\n cmd_set(context)",
"def rollback(self):\n \"\"\"\n .. seealso:: :py:meth:`sqlite3.Connection.rollback`\n \"\"\"\n\n try:\n self.check_connection()\n except NullDatabaseConnectionError:\n return\n\n logger.debug(\"rollback: path='{}'\".format(self.database_path))\n\n self.connection.rollback()",
"def rollback(gandi, resource, background):\n \"\"\" Rollback a disk from a snapshot. \"\"\"\n result = gandi.disk.rollback(resource, background)\n\n if background:\n gandi.pretty_echo(result)\n return result",
"private function rollback()\n {\n // We only need to rollback if we are in a transaction. Otherwise the resulting\n // error would hide the real problem why rollback was called. We might not be\n // in a transaction when not using the transactional locking behavior or when\n // two callbacks (e.g. destroy and write) are invoked that both fail.\n if ($this->inTransaction) {\n if ('sqlite' === $this->driver) {\n $this->pdo->exec('ROLLBACK');\n } else {\n $this->pdo->rollBack();\n }\n $this->inTransaction = false;\n }\n }",
"def rollback(self):\n \"\"\" Do journal rollback \"\"\"\n\n # Close the journal for writing, if this is an automatic rollback following a crash,\n # the file descriptor will not be open, so don't need to do anything.\n if self.journal != None: self.journal.close()\n self.journal = None\n\n # Read the journal\n journ_list = []\n with open(self.j_file) as fle:\n for l in fle: journ_list.append(json.loads(l))\n\n journ_subtract = deque(reversed(journ_list))\n\n for j_itm in reversed(journ_list):\n try: self.do_action({'do' : j_itm}, False)\n except IOError: pass\n\n # As each item is completed remove it from the journal file, in case\n # something fails during the rollback we can pick up where it stopped.\n journ_subtract.popleft()\n with open(self.j_file, 'w') as f:\n for data in list(journ_subtract):\n f.write(json.dumps(data) + \"\\n\")\n f.flush()\n\n # Rollback is complete so delete the journal file\n os.remove(self.j_file)"
] |
[
0.7328417301177979,
0.731033980846405,
0.7217168807983398,
0.7200501561164856,
0.7126035094261169,
0.7117496132850647,
0.7112213373184204,
0.7069525718688965,
0.7067874670028687,
0.7057897448539734,
0.7046284079551697,
0.7040011286735535
] |
Takes 3 things, an entering function (what to do to start this block of
transaction management), an exiting function (what to do to end it, on both
success and failure), and using, which can be: None, indicating the transaction
should occur on all defined servers, or a callable, indicating that using
is None and that the function should be returned already wrapped.
Returns either a Transaction object, which is both a decorator and a
context manager, or a wrapped function, if using is a callable.
|
def _transaction_func(entering, exiting, using):
"""
Takes 3 things, an entering function (what to do to start this block of
transaction management), an exiting function (what to do to end it, on both
    success and failure), and using, which can be: None, indicating the transaction
    should occur on all defined servers, or a callable, indicating that using
    is None and that the function should be returned already wrapped.
    Returns either a Transaction object, which is both a decorator and a
context manager, or a wrapped function, if using is a callable.
"""
# Note that although the first argument is *called* `using`, it
# may actually be a function; @autocommit and @autocommit('foo')
# are both allowed forms.
if callable(using):
return Transaction(entering, exiting, None)(using)
return Transaction(entering, exiting, using)
|
[
"def enter_transaction_management(using=None):\n \"\"\"\n Enters transaction management for a running thread. It must be balanced\n with the appropriate leave_transaction_management call, since the actual\n state is managed as a stack.\n\n The state and dirty flag are carried over from the surrounding block or\n from the settings, if there is no surrounding block (dirty is always false\n when no current block is running).\n \"\"\"\n if using is None:\n for using in tldap.backend.connections:\n connection = tldap.backend.connections[using]\n connection.enter_transaction_management()\n return\n connection = tldap.backend.connections[using]\n connection.enter_transaction_management()",
"def transaction_manager(fn):\n \"\"\"\n Decorator which wraps whole function into ``with transaction.manager:``.\n \"\"\"\n @wraps(fn)\n def transaction_manager_decorator(*args, **kwargs):\n with transaction.manager:\n return fn(*args, **kwargs)\n\n return transaction_manager_decorator",
"def commit_manually(using=None):\n \"\"\"\n Decorator that activates manual transaction control. It just disables\n automatic transaction control and doesn't do any commit/rollback of its\n own -- it's up to the user to call the commit and rollback functions\n themselves.\n \"\"\"\n def entering(using):\n enter_transaction_management(using=using)\n\n def exiting(exc_value, using):\n leave_transaction_management(using=using)\n\n return _transaction_func(entering, exiting, using)",
"def commit_on_success(using=None):\n \"\"\"\n This decorator activates commit on response. This way, if the view function\n runs successfully, a commit is made; if the viewfunc produces an exception,\n a rollback is made. This is one of the most common ways to do transaction\n control in Web apps.\n \"\"\"\n def entering(using):\n enter_transaction_management(using=using)\n\n def exiting(exc_value, using):\n try:\n if exc_value is not None:\n if is_dirty(using=using):\n rollback(using=using)\n else:\n commit(using=using)\n finally:\n leave_transaction_management(using=using)\n\n return _transaction_func(entering, exiting, using)",
"def transaction(self, connection=None, **kwargs):\n \"\"\"\n a simple context manager useful for when you want to wrap a bunch of db calls in a transaction\n http://docs.python.org/2/library/contextlib.html\n http://docs.python.org/release/2.5/whatsnew/pep-343.html\n\n example --\n with self.transaction()\n # do a bunch of calls\n # those db calls will be committed by this line\n \"\"\"\n with self.connection(connection) as connection:\n name = connection.transaction_name()\n connection.transaction_start(name)\n try:\n yield connection\n connection.transaction_stop()\n\n except Exception as e:\n connection.transaction_fail(name)\n self.raise_error(e)",
"def transaction(self,implicit = False):\n \"\"\"\n This returns a context guard which will automatically open and close a transaction\n \"\"\"\n\n class TransactionManager(object):\n\n def __init__(self,backend,implicit = False):\n self.backend = backend\n self.implicit = implicit\n\n def __enter__(self):\n self.within_transaction = True if self.backend.current_transaction else False\n self.transaction = self.backend.begin()\n\n def __exit__(self,exc_type,exc_value,traceback_obj):\n if exc_type:\n self.backend.rollback(self.transaction)\n return False\n else:\n #if the transaction has been created implicitly and we are not within\n #another transaction, we leave it open (the user needs to call commit manually)\n #if self.implicit and not self.within_transaction:\n # return\n self.backend.commit(self.transaction)\n\n return TransactionManager(self,implicit = implicit)",
"def transactional(func, args, kwds, **options):\n \"\"\"Decorator to make a function automatically run in a transaction.\n\n Args:\n **ctx_options: Transaction options (see transaction(), but propagation\n default to TransactionOptions.ALLOWED).\n\n This supports two forms:\n\n (1) Vanilla:\n @transactional\n def callback(arg):\n ...\n\n (2) With options:\n @transactional(retries=1)\n def callback(arg):\n ...\n \"\"\"\n return transactional_async.wrapped_decorator(\n func, args, kwds, **options).get_result()",
"def Transactional(fn, self, *argv, **argd):\n \"\"\"\n Decorator that wraps DAO methods to handle transactions automatically.\n\n It may only work with subclasses of L{BaseDAO}.\n \"\"\"\n return self._transactional(fn, *argv, **argd)",
"def transactional_tasklet(func, args, kwds, **options):\n \"\"\"The async version of @ndb.transaction.\n\n Will return the result of the wrapped function as a Future.\n \"\"\"\n from . import tasklets\n func = tasklets.tasklet(func)\n return transactional_async.wrapped_decorator(func, args, kwds, **options)",
"def transaction(self, mode=\"w\"):\n \"\"\"\n Create a new :class:`~bloop.transactions.ReadTransaction` or :class:`~bloop.transactions.WriteTransaction`.\n\n As a context manager, calling commit when the block exits:\n\n .. code-block:: pycon\n\n >>> engine = Engine()\n >>> user = User(id=3, email=\"user@domain.com\")\n >>> tweet = Tweet(id=42, data=\"hello, world\")\n >>> with engine.transaction(\"w\") as tx:\n ... tx.delete(user)\n ... tx.save(tweet, condition=Tweet.id.is_(None))\n\n Or manually calling prepare and commit:\n\n .. code-block:: pycon\n\n >>> engine = Engine()\n >>> user = User(id=3, email=\"user@domain.com\")\n >>> tweet = Tweet(id=42, data=\"hello, world\")\n >>> tx = engine.transaction(\"w\")\n >>> tx.delete(user)\n >>> tx.save(tweet, condition=Tweet.id.is_(None))\n >>> tx.prepare().commit()\n\n :param str mode: Either \"r\" or \"w\" to create a ReadTransaction or WriteTransaction. Default is \"w\"\n :return: A new transaction that can be committed.\n :rtype: :class:`~bloop.transactions.ReadTransaction` or :class:`~bloop.transactions.WriteTransaction`\n \"\"\"\n if mode == \"r\":\n cls = ReadTransaction\n elif mode == \"w\":\n cls = WriteTransaction\n else:\n raise ValueError(f\"unknown mode {mode}\")\n return cls(self)",
"def transactional(func):\n \"\"\"\n Decorate a function call with a commit/rollback and pass the session as the first arg.\n\n \"\"\"\n @wraps(func)\n def wrapper(*args, **kwargs):\n with transaction():\n return func(*args, **kwargs)\n return wrapper",
"def run_in_transaction(self, func, *args, **kw):\n \"\"\"Perform a unit of work in a transaction, retrying on abort.\n\n :type func: callable\n :param func: takes a required positional argument, the transaction,\n and additional positional / keyword arguments as supplied\n by the caller.\n\n :type args: tuple\n :param args: additional positional arguments to be passed to ``func``.\n\n :type kw: dict\n :param kw: optional keyword arguments to be passed to ``func``.\n If passed, \"timeout_secs\" will be removed and used to\n override the default timeout.\n\n :rtype: :class:`datetime.datetime`\n :returns: timestamp of committed transaction\n \"\"\"\n # Sanity check: Is there a transaction already running?\n # If there is, then raise a red flag. Otherwise, mark that this one\n # is running.\n if getattr(self._local, \"transaction_running\", False):\n raise RuntimeError(\"Spanner does not support nested transactions.\")\n self._local.transaction_running = True\n\n # Check out a session and run the function in a transaction; once\n # done, flip the sanity check bit back.\n try:\n with SessionCheckout(self._pool) as session:\n return session.run_in_transaction(func, *args, **kw)\n finally:\n self._local.transaction_running = False"
] |
[
0.7809832692146301,
0.7770252227783203,
0.7619637846946716,
0.7583186626434326,
0.7478051781654358,
0.7470187544822693,
0.7456947565078735,
0.7369341850280762,
0.7086473107337952,
0.7082058787345886,
0.7039930820465088,
0.7035502791404724
] |
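The _transaction_func record above returns a Transaction object that is never shown in this section. A minimal sketch of such an object, assuming the entering(using) / exiting(exc_value, using) callback signatures used by commit_on_success and commit_manually below (the class body is illustrative, not the original implementation):

import functools

class Transaction(object):
    """Sketch: usable both as a decorator and as a context manager."""

    def __init__(self, entering, exiting, using):
        self.entering = entering
        self.exiting = exiting
        self.using = using

    def __enter__(self):
        self.entering(self.using)

    def __exit__(self, exc_type, exc_value, traceback):
        self.exiting(exc_value, self.using)
        return False  # never swallow the exception

    def __call__(self, func):
        @functools.wraps(func)
        def inner(*args, **kwargs):
            with self:
                return func(*args, **kwargs)
        return inner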
This decorator activates commit on response. This way, if the view function
runs successfully, a commit is made; if the viewfunc produces an exception,
a rollback is made. This is one of the most common ways to do transaction
control in Web apps.
|
def commit_on_success(using=None):
"""
This decorator activates commit on response. This way, if the view function
runs successfully, a commit is made; if the viewfunc produces an exception,
a rollback is made. This is one of the most common ways to do transaction
control in Web apps.
"""
def entering(using):
enter_transaction_management(using=using)
def exiting(exc_value, using):
try:
if exc_value is not None:
if is_dirty(using=using):
rollback(using=using)
else:
commit(using=using)
finally:
leave_transaction_management(using=using)
return _transaction_func(entering, exiting, using)
|
[
"def autocommit(f):\n \"A decorator to commit to the storage if autocommit is set to True.\"\n @wraps(f)\n def wrapper(self, *args, **kwargs):\n result = f(self, *args, **kwargs)\n if self._meta.commit_ready():\n self.commit()\n return result\n return wrapper",
"def commit_manually(using=None):\n \"\"\"\n Decorator that activates manual transaction control. It just disables\n automatic transaction control and doesn't do any commit/rollback of its\n own -- it's up to the user to call the commit and rollback functions\n themselves.\n \"\"\"\n def entering(using):\n enter_transaction_management(using=using)\n\n def exiting(exc_value, using):\n leave_transaction_management(using=using)\n\n return _transaction_func(entering, exiting, using)",
"def commit(using=None):\n \"\"\"\n Possibility of calling transaction.commit() in new Django versions (in atomic block).\n \"\"\"\n try:\n django.db.transaction.commit(using)\n except django.db.transaction.TransactionManagementError:\n pass",
"def commitreturn(self, cursor, qstring, vals=()):\n \"careful: don't pass cursor (it's from decorator)\"\n cursor.execute(qstring, vals)\n return cursor.fetchall()[0]",
"def commit(self, commit):\n '''\n .. seealso:: :attr:`commit`\n '''\n\n c = self.commit\n if c:\n if not commit:\n commit = c[0]\n if commit in c:\n self._checkout(treeish=commit)",
"def commit(self):\n \"\"\"\n Commit this transaction.\n \"\"\"\n\n if not self._parent._is_active:\n raise exc.InvalidRequestError(\"This transaction is inactive\")\n yield from self._do_commit()\n self._is_active = False",
"def commit(self) -> ResponseCommit:\n \"\"\"Return the current encode state value to tendermint\"\"\"\n hash = struct.pack('>Q', self.txCount)\n return ResponseCommit(data=hash)",
"def _commit(self):\n \"\"\"\n :return: (dict) Response object content\n \"\"\"\n assert self.uri is not None, Exception(\"BadArgument: uri property cannot be None\")\n url = '{}/{}'.format(self.uri, self.__class__.__name__)\n serialized_json = jsonpickle.encode(self, unpicklable=False, )\n headers = {'Content-Type': 'application/json', 'Content-Length': str(len(serialized_json))}\n response = Http.post(url=url, data=serialized_json, headers=headers)\n if response.status_code != 200:\n from ArubaCloud.base.Errors import MalformedJsonRequest\n raise MalformedJsonRequest(\"Request: {}, Status Code: {}\".format(serialized_json, response.status_code))\n content = jsonpickle.decode(response.content.decode(\"utf-8\"))\n if content['ResultCode'] == 17:\n from ArubaCloud.base.Errors import OperationAlreadyEnqueued\n raise OperationAlreadyEnqueued(\"{} already enqueued\".format(self.__class__.__name__))\n if content['Success'] is False:\n from ArubaCloud.base.Errors import RequestFailed\n raise RequestFailed(\"Request: {}, Response: {}\".format(serialized_json, response.content))\n return content",
"def commit(self, message, author, parents=None, branch=None, date=None,\n **kwargs):\n \"\"\"\n Performs in-memory commit (doesn't check workdir in any way) and\n returns newly created ``Changeset``. Updates repository's\n ``revisions``.\n\n :param message: message of the commit\n :param author: full username, i.e. \"Joe Doe <joe.doe@example.com>\"\n :param parents: single parent or sequence of parents from which commit\n would be derieved\n :param date: ``datetime.datetime`` instance. Defaults to\n ``datetime.datetime.now()``.\n :param branch: branch name, as string. If none given, default backend's\n branch would be used.\n\n :raises ``CommitError``: if any error occurs while committing\n \"\"\"\n self.check_integrity(parents)\n\n from .repository import MercurialRepository\n if not isinstance(message, unicode) or not isinstance(author, unicode):\n raise RepositoryError('Given message and author needs to be '\n 'an <unicode> instance got %r & %r instead'\n % (type(message), type(author)))\n\n if branch is None:\n branch = MercurialRepository.DEFAULT_BRANCH_NAME\n kwargs['branch'] = branch\n\n def filectxfn(_repo, memctx, path):\n \"\"\"\n Marks given path as added/changed/removed in a given _repo. This is\n for internal mercurial commit function.\n \"\"\"\n\n # check if this path is removed\n if path in (node.path for node in self.removed):\n # Raising exception is a way to mark node for removal\n raise IOError(errno.ENOENT, '%s is deleted' % path)\n\n # check if this path is added\n for node in self.added:\n if node.path == path:\n return memfilectx(path=node.path,\n data=(node.content.encode('utf8')\n if not node.is_binary else node.content),\n islink=False,\n isexec=node.is_executable,\n copied=False)\n\n # or changed\n for node in self.changed:\n if node.path == path:\n return memfilectx(path=node.path,\n data=(node.content.encode('utf8')\n if not node.is_binary else node.content),\n islink=False,\n isexec=node.is_executable,\n copied=False)\n\n raise RepositoryError(\"Given path haven't been marked as added,\"\n \"changed or removed (%s)\" % path)\n\n parents = [None, None]\n for i, parent in enumerate(self.parents):\n if parent is not None:\n parents[i] = parent._ctx.node()\n\n if date and isinstance(date, datetime.datetime):\n date = date.ctime()\n\n commit_ctx = memctx(repo=self.repository._repo,\n parents=parents,\n text='',\n files=self.get_paths(),\n filectxfn=filectxfn,\n user=author,\n date=date,\n extra=kwargs)\n\n loc = lambda u: tolocal(u.encode('utf-8'))\n\n # injecting given _repo params\n commit_ctx._text = loc(message)\n commit_ctx._user = loc(author)\n commit_ctx._date = date\n\n # TODO: Catch exceptions!\n n = self.repository._repo.commitctx(commit_ctx)\n # Returns mercurial node\n self._commit_ctx = commit_ctx # For reference\n # Update vcs repository object & recreate mercurial _repo\n # new_ctx = self.repository._repo[node]\n # new_tip = self.repository.get_changeset(new_ctx.hex())\n new_id = hex(n)\n self.repository.revisions.append(new_id)\n self._repo = self.repository._get_repo(create=False)\n self.repository.branches = self.repository._get_branches()\n tip = self.repository.get_changeset()\n self.reset()\n return tip",
"def commit(func):\n '''Used as a decorator for automatically making session commits'''\n def wrap(**kwarg):\n with session_withcommit() as session:\n a = func(**kwarg)\n session.add(a)\n return session.query(songs).order_by(\n songs.song_id.desc()).first().song_id\n return wrap",
"def forcecommit(self):\n '''\n Note:\n This method may raise a MapFullError\n '''\n if not self.dirty:\n return False\n\n # ok... lets commit and re-open\n self._finiCoXact()\n self._initCoXact()\n return True",
"def commit():\n \"\"\" Commit changes and release the write lock \"\"\"\n\n session_token = request.headers['session_token']\n repository = request.headers['repository']\n\n #===\n current_user = have_authenticated_user(request.environ['REMOTE_ADDR'], repository, session_token)\n if current_user is False: return fail(user_auth_fail_msg)\n\n #===\n repository_path = config['repositories'][repository]['path']\n\n def with_exclusive_lock():\n if not varify_user_lock(repository_path, session_token): return fail(lock_fail_msg)\n\n #===\n data_store = versioned_storage(repository_path)\n if not data_store.have_active_commit(): return fail(no_active_commit_msg)\n\n result = {}\n if request.headers['mode'] == 'commit':\n new_head = data_store.commit(request.headers['commit_message'], current_user['username'])\n result = {'head' : new_head}\n else:\n data_store.rollback()\n\n # Release the user lock\n update_user_lock(repository_path, None)\n return success(result)\n return lock_access(repository_path, with_exclusive_lock)"
] |
[
0.7192099094390869,
0.7120048403739929,
0.705969512462616,
0.7059608697891235,
0.7056255340576172,
0.7001574635505676,
0.6987271904945374,
0.6951708793640137,
0.6897431015968323,
0.6896659135818481,
0.6870453953742981,
0.6861870288848877
] |
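A hypothetical usage sketch for the commit_on_success decorator above; create_user and the "default" alias are invented names, and the context-manager form relies on the Transaction object sketched earlier:

@commit_on_success(using="default")
def create_user(name):
    ...  # database writes; committed on return, rolled back on exception

# the returned Transaction object also works as a context manager:
# with commit_on_success(using="default"):
#     ...  # database writes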
Decorator that activates manual transaction control. It just disables
automatic transaction control and doesn't do any commit/rollback of its
own -- it's up to the user to call the commit and rollback functions
themselves.
|
def commit_manually(using=None):
"""
Decorator that activates manual transaction control. It just disables
automatic transaction control and doesn't do any commit/rollback of its
own -- it's up to the user to call the commit and rollback functions
themselves.
"""
def entering(using):
enter_transaction_management(using=using)
def exiting(exc_value, using):
leave_transaction_management(using=using)
return _transaction_func(entering, exiting, using)
|
[
"def commit_on_success(using=None):\n \"\"\"\n This decorator activates commit on response. This way, if the view function\n runs successfully, a commit is made; if the viewfunc produces an exception,\n a rollback is made. This is one of the most common ways to do transaction\n control in Web apps.\n \"\"\"\n def entering(using):\n enter_transaction_management(using=using)\n\n def exiting(exc_value, using):\n try:\n if exc_value is not None:\n if is_dirty(using=using):\n rollback(using=using)\n else:\n commit(using=using)\n finally:\n leave_transaction_management(using=using)\n\n return _transaction_func(entering, exiting, using)",
"def Transactional(fn, self, *argv, **argd):\n \"\"\"\n Decorator that wraps DAO methods to handle transactions automatically.\n\n It may only work with subclasses of L{BaseDAO}.\n \"\"\"\n return self._transactional(fn, *argv, **argd)",
"def transactional(func, args, kwds, **options):\n \"\"\"Decorator to make a function automatically run in a transaction.\n\n Args:\n **ctx_options: Transaction options (see transaction(), but propagation\n default to TransactionOptions.ALLOWED).\n\n This supports two forms:\n\n (1) Vanilla:\n @transactional\n def callback(arg):\n ...\n\n (2) With options:\n @transactional(retries=1)\n def callback(arg):\n ...\n \"\"\"\n return transactional_async.wrapped_decorator(\n func, args, kwds, **options).get_result()",
"def autocommit(f):\n \"A decorator to commit to the storage if autocommit is set to True.\"\n @wraps(f)\n def wrapper(self, *args, **kwargs):\n result = f(self, *args, **kwargs)\n if self._meta.commit_ready():\n self.commit()\n return result\n return wrapper",
"def maybe_transactional(func):\n \"\"\"\n Variant of `transactional` that will not commit if there's an argument `commit` with a falsey value.\n\n Useful for dry-run style operations.\n\n \"\"\"\n @wraps(func)\n def wrapper(*args, **kwargs):\n commit = kwargs.get(\"commit\", True)\n with transaction(commit=commit):\n return func(*args, **kwargs)\n return wrapper",
"def transaction(self,implicit = False):\n \"\"\"\n This returns a context guard which will automatically open and close a transaction\n \"\"\"\n\n class TransactionManager(object):\n\n def __init__(self,backend,implicit = False):\n self.backend = backend\n self.implicit = implicit\n\n def __enter__(self):\n self.within_transaction = True if self.backend.current_transaction else False\n self.transaction = self.backend.begin()\n\n def __exit__(self,exc_type,exc_value,traceback_obj):\n if exc_type:\n self.backend.rollback(self.transaction)\n return False\n else:\n #if the transaction has been created implicitly and we are not within\n #another transaction, we leave it open (the user needs to call commit manually)\n #if self.implicit and not self.within_transaction:\n # return\n self.backend.commit(self.transaction)\n\n return TransactionManager(self,implicit = implicit)",
"def transaction(self, mode=\"w\"):\n \"\"\"\n Create a new :class:`~bloop.transactions.ReadTransaction` or :class:`~bloop.transactions.WriteTransaction`.\n\n As a context manager, calling commit when the block exits:\n\n .. code-block:: pycon\n\n >>> engine = Engine()\n >>> user = User(id=3, email=\"user@domain.com\")\n >>> tweet = Tweet(id=42, data=\"hello, world\")\n >>> with engine.transaction(\"w\") as tx:\n ... tx.delete(user)\n ... tx.save(tweet, condition=Tweet.id.is_(None))\n\n Or manually calling prepare and commit:\n\n .. code-block:: pycon\n\n >>> engine = Engine()\n >>> user = User(id=3, email=\"user@domain.com\")\n >>> tweet = Tweet(id=42, data=\"hello, world\")\n >>> tx = engine.transaction(\"w\")\n >>> tx.delete(user)\n >>> tx.save(tweet, condition=Tweet.id.is_(None))\n >>> tx.prepare().commit()\n\n :param str mode: Either \"r\" or \"w\" to create a ReadTransaction or WriteTransaction. Default is \"w\"\n :return: A new transaction that can be committed.\n :rtype: :class:`~bloop.transactions.ReadTransaction` or :class:`~bloop.transactions.WriteTransaction`\n \"\"\"\n if mode == \"r\":\n cls = ReadTransaction\n elif mode == \"w\":\n cls = WriteTransaction\n else:\n raise ValueError(f\"unknown mode {mode}\")\n return cls(self)",
"def atomic(func):\n \"\"\"\n Decorator helper that overrides django atomic decorator and automatically adds create revision.\n \"\"\"\n try:\n from reversion.revisions import create_revision\n\n return transaction.atomic(create_revision()(func))\n except ImportError:\n return transaction.atomic(func)",
"def transactional_async(func, args, kwds, **options):\n \"\"\"The async version of @ndb.transaction.\"\"\"\n options.setdefault('propagation', datastore_rpc.TransactionOptions.ALLOWED)\n if args or kwds:\n return transaction_async(lambda: func(*args, **kwds), **options)\n return transaction_async(func, **options)",
"def start_transaction(self, read_concern=None, write_concern=None,\n read_preference=None):\n \"\"\"Start a multi-statement transaction.\n\n Takes the same arguments as\n :class:`~pymongo.client_session.TransactionOptions`.\n\n Best used in a context manager block:\n\n .. code-block:: python3\n\n # Use \"await\" for start_session, but not for start_transaction.\n async with await client.start_session() as s:\n async with s.start_transaction():\n await collection.delete_one({'x': 1}, session=s)\n await collection.insert_one({'x': 2}, session=s)\n\n \"\"\"\n self.delegate.start_transaction(read_concern=read_concern,\n write_concern=write_concern,\n read_preference=read_preference)\n return _MotorTransactionContext(self)",
"def _transaction_func(entering, exiting, using):\n \"\"\"\n Takes 3 things, an entering function (what to do to start this block of\n transaction management), an exiting function (what to do to end it, on both\n success and failure, and using which can be: None, indiciating transaction\n should occur on all defined servers, or a callable, indicating that using\n is None and to return the function already wrapped.\n\n Returns either a Transaction objects, which is both a decorator and a\n context manager, or a wrapped function, if using is a callable.\n \"\"\"\n # Note that although the first argument is *called* `using`, it\n # may actually be a function; @autocommit and @autocommit('foo')\n # are both allowed forms.\n if callable(using):\n return Transaction(entering, exiting, None)(using)\n return Transaction(entering, exiting, using)",
"def non_transactional(func, args, kwds, allow_existing=True):\n \"\"\"A decorator that ensures a function is run outside a transaction.\n\n If there is an existing transaction (and allow_existing=True), the\n existing transaction is paused while the function is executed.\n\n Args:\n allow_existing: If false, throw an exception if called from within\n a transaction. If true, temporarily re-establish the\n previous non-transactional context. Defaults to True.\n\n This supports two forms, similar to transactional().\n\n Returns:\n A wrapper for the decorated function that ensures it runs outside a\n transaction.\n \"\"\"\n from . import tasklets\n ctx = tasklets.get_context()\n if not ctx.in_transaction():\n return func(*args, **kwds)\n if not allow_existing:\n raise datastore_errors.BadRequestError(\n '%s cannot be called within a transaction.' % func.__name__)\n save_ctx = ctx\n while ctx.in_transaction():\n ctx = ctx._parent_context\n if ctx is None:\n raise datastore_errors.BadRequestError(\n 'Context without non-transactional ancestor')\n save_ds_conn = datastore._GetConnection()\n try:\n if hasattr(save_ctx, '_old_ds_conn'):\n datastore._SetConnection(save_ctx._old_ds_conn)\n tasklets.set_context(ctx)\n return func(*args, **kwds)\n finally:\n tasklets.set_context(save_ctx)\n datastore._SetConnection(save_ds_conn)"
] |
[
0.7432112097740173,
0.7334818243980408,
0.7209842205047607,
0.7146124839782715,
0.7028743028640747,
0.6992279887199402,
0.6944511532783508,
0.6935604810714722,
0.6925960779190063,
0.6875629425048828,
0.6853241920471191,
0.6841294765472412
] |
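A hypothetical usage sketch for commit_manually above: the wrapped function is responsible for calling commit()/rollback() itself. sync_records is an invented example; commit, rollback and the "default" alias refer to the module-level helpers this decorator pairs with:

@commit_manually(using="default")
def sync_records(records):
    try:
        for record in records:
            record.save()
    except Exception:
        rollback(using="default")
        raise
    else:
        commit(using="default")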
Yields:
tuple (line_number: int, offset: int, text: str, check: type)
|
def run(self) -> Generator[Tuple[int, int, str, type], None, None]:
"""
Yields:
tuple (line_number: int, offset: int, text: str, check: type)
"""
if is_test_file(self.filename):
self.load()
for func in self.all_funcs():
try:
for error in func.check_all():
yield (error.line_number, error.offset, error.text, Checker)
except ValidationError as error:
yield error.to_flake8(Checker)
|
[
"def generator_telling_position(self) -> Iterator[Tuple[str, int]]:\n \"\"\"\n Create a generate that iterates the whole content of the file or string, and also tells which offset is now.\n\n :return: An iterator iterating tuples, containing lines of the text stream,...\n separated by ``'\\\\n'`` or ``'\\\\r'``; and the offset (in bytes) of current line.\n \"\"\"\n stream = self.stream # In case that ``self.stream`` is changed.\n stream.seek(0)\n for line in stream:\n yield line, stream.tell()",
"async def rows(self, offs, size=None, iden=None):\n '''\n Yield a number of raw items from the CryoTank starting at a given offset.\n\n Args:\n offs (int): The index of the desired datum (starts at 0)\n size (int): The max number of items to yield.\n\n Yields:\n ((indx, bytes)): Index and msgpacked bytes.\n '''\n if iden is not None:\n self.setOffset(iden, offs)\n\n for i, (indx, byts) in enumerate(self._items.rows(offs)):\n\n if size is not None and i >= size:\n return\n\n yield indx, byts",
"def iter_lines(self, warn_only=False):\n \"\"\"yields stdout text, line by line.\"\"\"\n remain = \"\"\n for data in self.iter_content(LINE_CHUNK_SIZE, warn_only=True):\n line_break_found = data[-1] in (b\"\\n\", b\"\\r\")\n lines = data.decode(self.codec).splitlines()\n lines[0] = remain + lines[0]\n if not line_break_found:\n remain = lines.pop()\n for line in lines:\n yield line\n if remain:\n yield remain\n\n self._state = FINISHED\n if not warn_only:\n self.raise_for_error()",
"function withLineNumbers(text, offset, offendingLine) {\n if (typeof offendingLine !== 'number') {\n offendingLine = NaN;\n }\n\n var lines = text;\n if (typeof lines === 'string') {\n lines = lines.split('\\n');\n }\n\n return lines.map(function(line, i) {\n line = (i + offset) + ': ' + line;\n if (i === (offendingLine - 1)) {\n line = '\\x1B[31m' + line + '\\x1B[39m';\n } else {\n line = '\\x1B[90m' + line + '\\x1B[39m';\n }\n return line;\n }).join('\\n');\n }",
"def token_or_comment_or_line_generator(buffer):\n \"\"\"Generator that mixes tokens and lines, ordering them by line number\"\"\"\n tok_or_com_gen = token_or_comment_generator(buffer)\n line_gen = line_generator(buffer)\n\n tok_or_com = next(tok_or_com_gen, None)\n line = next(line_gen, None)\n\n while tok_or_com is not None or line is not None:\n if tok_or_com is None or (line is not None and\n tok_or_com.line_no > line.line_no):\n yield line\n line = next(line_gen, None)\n else:\n yield tok_or_com\n tok_or_com = next(tok_or_com_gen, None)",
"def readlines(self):\n \"\"\"A generator producing lines from the file.\"\"\"\n\n # If the file is not open, there's nothing to return\n if not self._fh:\n raise StopIteration\n\n at_eof = False\n while True:\n # Clean the buffer sometimes.\n if self._bufoffset > (self._maxreadsize / 2):\n self._buf = self._buf[self._bufoffset:]\n self._bufoffset = 0\n\n # Fill up the buffer if necessary.\n if len(self._buf) < self._maxreadsize:\n at_eof = not self._read(self._maxreadsize)\n\n # Look for the next line.\n try:\n next_newline = self._buf.index(\"\\n\", self._bufoffset)\n line = self._buf[self._bufoffset:next_newline]\n self._bufoffset = next_newline + 1\n # Save the current file offset for yielding and advance the file offset.\n offset = self._offset\n self._offset += len(line) + 1\n if self._longline:\n # This is the remaining chunk of a long line, we're not going\n # to yield it.\n self._longline = False\n else:\n yield line, offset\n\n except ValueError:\n # Reached the end of the buffer without finding any newlines.\n if not at_eof:\n # Line is longer than the half the buffer size? - Nope\n logger.warning(\"Skipping over longline at %s:%d\", self._path,\n self._offset)\n self._bufoffset = len(self._buf) - 1\n self._longline = True\n raise StopIteration",
"def get(self, line_number):\n \"\"\"Return the needle positions or None.\n\n :param int line_number: the number of the line\n :rtype: list\n :return: the needle positions for a specific line specified by\n :paramref:`line_number` or :obj:`None` if no were given\n \"\"\"\n if line_number not in self._get_cache:\n self._get_cache[line_number] = self._get(line_number)\n return self._get_cache[line_number]",
"def _make_chunk_iter(stream, limit, buffer_size):\n \"\"\"Helper for the line and chunk iter functions.\"\"\"\n if isinstance(stream, (bytes, bytearray, text_type)):\n raise TypeError('Passed a string or byte object instead of '\n 'true iterator or stream.')\n if not hasattr(stream, 'read'):\n for item in stream:\n if item:\n yield item\n return\n if not isinstance(stream, LimitedStream) and limit is not None:\n stream = LimitedStream(stream, limit)\n _read = stream.read\n while 1:\n item = _read(buffer_size)\n if not item:\n break\n yield item",
"def line_range(self, line_number):\n \"\"\"Return a slice for the given line number\"\"\"\n if line_number <= 0 or line_number > len(self.lines):\n raise IndexError('NOTE: Python file line numbers are offset by 1.')\n\n if line_number not in self.logical_lines:\n return slice(line_number, line_number + 1)\n else:\n start, stop, _ = self.logical_lines[line_number]\n return slice(start, stop)",
"def generator_starts_from(self, offset, whence: Optional[int] = 0) -> Iterator[str]:\n \"\"\"\n Create a generate that iterates the whole content of the file or string, starting from *offset* bytes.\n\n :param offset: Change the stream position to the given byte *offset*....\n *offset* is interpreted relative to the position indicated by *whence*. The default value for whence is ``SEEK_SET``.\n :param whence: Values for whence are:...\n - ``SEEK_SET`` or ``0`` – start of the stream (the default); *offset* should be zero or positive...\n - ``SEEK_CUR`` or ``1`` – current stream position; *offset* may be negative...\n - ``SEEK_END`` or ``2`` – end of the stream; *offset* is usually negative\n :return: An iterator iterating the lines of the text stream, separated by ``'\\\\n'`` or ``'\\\\r'``, starting...\n from given byte *offset*.\n \"\"\"\n\n stream = self.stream # In case that ``self.stream`` is changed.\n stream.seek(offset, whence)\n for line in stream:\n yield line",
"def _lineiter(self):\n ''' Iterate over a binary file-like object line by line. Each line is\n returned as a (line, line_ending) tuple. If the line does not fit\n into self.buffer_size, line_ending is empty and the rest of the line\n is returned with the next iteration.\n '''\n read = self.stream.read\n maxread, maxbuf = self.content_length, self.buffer_size\n buffer = b'' # buffer for the last (partial) line\n while 1:\n data = read(maxbuf if maxread < 0 else min(maxbuf, maxread))\n maxread -= len(data)\n lines = (buffer+data).splitlines(True)\n len_first_line = len(lines[0])\n # be sure that the first line does not become too big\n if len_first_line > self.buffer_size:\n # at the same time don't split a '\\r\\n' accidentally\n if (len_first_line == self.buffer_size+1 and lines[0].endswith(b'\\r\\n')):\n splitpos = self.buffer_size - 1\n else:\n splitpos = self.buffer_size\n lines[:1] = [lines[0][:splitpos],\n lines[0][splitpos:]]\n if data:\n buffer = lines[-1]\n lines = lines[:-1]\n for line in lines:\n if line.endswith(b'\\r\\n'):\n yield line[:-2], b'\\r\\n'\n elif line.endswith(b'\\n'):\n yield line[:-1], b'\\n'\n elif line.endswith(b'\\r'):\n yield line[:-1], b'\\r'\n else:\n yield line, b''\n if not data:\n break",
"def _GetLineNumbers(code_object):\n \"\"\"Generator for getting the line numbers of a code object.\n\n Args:\n code_object: the code object.\n\n Yields:\n The next line number in the code object.\n \"\"\"\n # Get the line number deltas, which are the odd number entries, from the\n # lnotab. See\n # https://svn.python.org/projects/python/branches/pep-0384/Objects/lnotab_notes.txt\n # In Python 3, this is just a byte array. In Python 2 it is a string so the\n # numerical values have to be extracted from the individual characters.\n if six.PY3:\n line_incrs = code_object.co_lnotab[1::2]\n else:\n line_incrs = (ord(c) for c in code_object.co_lnotab[1::2])\n current_line = code_object.co_firstlineno\n for line_incr in line_incrs:\n current_line += line_incr\n yield current_line"
] |
[
0.7129460573196411,
0.6868925094604492,
0.6831988096237183,
0.6774337291717529,
0.6772196292877197,
0.6769496202468872,
0.675317645072937,
0.672489583492279,
0.6683998107910156,
0.6676743030548096,
0.6662952303886414,
0.6629166603088379
] |
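A small self-contained illustration of the (line_number, offset, text, type) yield signature described in the record above; DummyChecker is an invented stand-in rather than the checker shown here:

from typing import Generator, Tuple

class DummyChecker:
    name = "dummy-checker"
    version = "0.0.1"

    def __init__(self, tree, filename: str) -> None:
        self.filename = filename

    def run(self) -> Generator[Tuple[int, int, str, type], None, None]:
        # a single hard-coded finding, just to show the tuple shape
        if self.filename.startswith("test_"):
            yield 1, 0, "DUM001 example finding", type(self)

for line, col, text, _origin in DummyChecker(None, "test_sample.py").run():
    print(f"{line}:{col} {text}")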
Reloads glitter URL patterns if page URLs change.
Avoids having to restart the server to recreate the glitter URLs being used by Django.
|
def process_request(self, request):
"""
Reloads glitter URL patterns if page URLs change.
Avoids having to restart the server to recreate the glitter URLs being used by Django.
"""
global _urlconf_pages
page_list = list(
Page.objects.exclude(glitter_app_name='').values_list('id', 'url').order_by('id')
)
with _urlconf_lock:
if page_list != _urlconf_pages:
glitter_urls = 'glitter.urls'
if glitter_urls in sys.modules:
importlib.reload(sys.modules[glitter_urls])
_urlconf_pages = page_list
|
[
"def urlpatterns(self):\n '''load and decorate urls from all modules\n then store it as cached property for less loading\n '''\n if not hasattr(self, '_urlspatterns'):\n urlpatterns = []\n # load all urls\n # support .urls file and urls_conf = 'elephantblog.urls' on default module\n # decorate all url patterns if is not explicitly excluded\n for mod in leonardo.modules:\n # TODO this not work\n if is_leonardo_module(mod):\n\n conf = get_conf_from_module(mod)\n\n if module_has_submodule(mod, 'urls'):\n urls_mod = import_module('.urls', mod.__name__)\n if hasattr(urls_mod, 'urlpatterns'):\n # if not public decorate all\n\n if conf['public']:\n urlpatterns += urls_mod.urlpatterns\n else:\n _decorate_urlconf(urls_mod.urlpatterns,\n require_auth)\n urlpatterns += urls_mod.urlpatterns\n # avoid circural dependency\n # TODO use our loaded modules instead this property\n from django.conf import settings\n for urls_conf, conf in six.iteritems(getattr(settings, 'MODULE_URLS', {})):\n # is public ?\n try:\n if conf['is_public']:\n urlpatterns += \\\n patterns('',\n url(r'', include(urls_conf)),\n )\n else:\n _decorate_urlconf(\n url(r'', include(urls_conf)),\n require_auth)\n urlpatterns += patterns('',\n url(r'', include(urls_conf)))\n except Exception as e:\n raise Exception('raised %s during loading %s' %\n (str(e), urls_conf))\n\n self._urlpatterns = urlpatterns\n\n return self._urlpatterns",
"def reload(pages)\n pages.each do |p|\n Jekyll.logger.info(\"Page #{p.name}, #{p.basename}, #{p.ext}, #{p.permalink}, #{p.url}\")\n msg = {\n :command => 'reload',\n :path => p.url,\n :liveCSS => true,\n }\n\n # TODO Add support for override URL?\n # See http://feedback.livereload.com/knowledgebase/articles/86220-preview-css-changes-against-a-live-site-then-uplo\n\n Jekyll.logger.debug(\"LiveReload:\", \"Reloading #{p.url}\")\n @websockets.each do |ws|\n ws.send(JSON.dump(msg))\n end\n end\n end",
"function reloadTabsMatchingUrl(url) {\n var patterns = [\n new MatchPattern(url),\n new MatchPattern(url + '#*')\n ];\n for each (var tab in tabs) {\n var matchesAny = false;\n for each (var pattern in patterns) {\n if (pattern.test(tab.url)) {\n matchesAny = true;\n break;\n }\n }\n if (matchesAny) {\n tab.reload();\n }\n }\n}",
"public static function reload() {\n\t\tif (empty(self::$_initialState)) {\n\n\t\t\tforeach ((array) Configure::read('Routing.prefixes') as $prefix) {\n\t\t\t\tself::addRule('Page.' . $prefix, array('self', '__isPage'), $prefix);\n\t\t\t}\n\n\t\t\tself::$_initialState = get_class_vars('Reveal');\n\t\t\treturn;\n\t\t}\n\t\tforeach (self::$_initialState as $key => $val) {\n\t\t\tif ($key != '_initialState' && isset(self::${$key})) {\n\t\t\t\tself::${$key} = $val;\n\t\t\t}\n\t\t}\n\t}",
"protected function reload()\n {\n $http = [\n 'scheme' => null,\n 'domain' => null\n ];\n\n $this->http = (array)$this->slice->atData('http', $http);\n $this->defaults = (array)$this->slice->atData('defaults');\n $this->methods = (array)$this->slice->atData('methods');\n $this->regex = (array)$this->slice->atData('regex');\n $this->path = $this->slice->atData('path');\n $this->regexPath = $this->regex();\n }",
"public void reloadPage() {\n\n Timer timer = new Timer() {\n\n @Override\n @SuppressWarnings(\"synthetic-access\")\n public void run() {\n\n Window.Location.assign(m_originalUrl);\n }\n };\n timer.schedule(150);\n\n }",
"def reloadGraphs(self) :\n \"reloads the graph list\"\n r = self.connection.session.get(self.graphsURL)\n data = r.json()\n if r.status_code == 200 :\n self.graphs = {}\n for graphData in data[\"graphs\"] :\n try :\n self.graphs[graphData[\"_key\"]] = GR.getGraphClass(graphData[\"_key\"])(self, graphData)\n except KeyError :\n self.graphs[graphData[\"_key\"]] = Graph(self, graphData)\n else :\n raise UpdateError(data[\"errorMessage\"], data)",
"def refine_get_urls(original):\n \"\"\"\n serve static files (and media files also)\n\n in production the webserver should serve requested\n static files itself and never let requests to /static/*\n and /media/* get to the django application.\n \"\"\"\n\n def get_urls():\n from django.conf.urls import url\n from django.conf import settings\n from django.contrib.staticfiles.urls import staticfiles_urlpatterns\n from django.views.static import serve\n if settings.DEBUG:\n return staticfiles_urlpatterns() + [\n url(r'^media/(?P<path>.*)$', serve, {\n 'document_root': settings.MEDIA_ROOT,\n }),\n ] + original()\n else:\n return original()\n return get_urls",
"def gtk_reload():\n \"\"\"Reload GTK2 themes.\"\"\"\n events = gtk.gdk.Event(gtk.gdk.CLIENT_EVENT)\n data = gtk.gdk.atom_intern(\"_GTK_READ_RCFILES\", False)\n events.data_format = 8\n events.send_event = True\n events.message_type = data\n events.send_clientmessage_toall()",
"def rehighlight(self):\n \"\"\"\n Rehighlight the entire document, may be slow.\n \"\"\"\n start = time.time()\n QtWidgets.QApplication.setOverrideCursor(\n QtGui.QCursor(QtCore.Qt.WaitCursor))\n try:\n super(SyntaxHighlighter, self).rehighlight()\n except RuntimeError:\n # cloned widget, no need to rehighlight the same document twice ;)\n pass\n QtWidgets.QApplication.restoreOverrideCursor()\n end = time.time()\n _logger().debug('rehighlight duration: %fs' % (end - start))",
"def _update_page_resources(*, page, font, font_key, procset):\n \"\"\"Update this page's fonts with a reference to the Glyphless font\"\"\"\n\n if '/Resources' not in page:\n page['/Resources'] = pikepdf.Dictionary({})\n resources = page['/Resources']\n try:\n fonts = resources['/Font']\n except KeyError:\n fonts = pikepdf.Dictionary({})\n if font_key is not None and font_key not in fonts:\n fonts[font_key] = font\n resources['/Font'] = fonts\n\n # Reassign /ProcSet to one that just lists everything - ProcSet is\n # obsolete and doesn't matter but recommended for old viewer support\n resources['/ProcSet'] = procset",
"def discover_glitter_apps(self):\n \"\"\"\n Find all the Glitter App configurations in the current project.\n \"\"\"\n for app_name in settings.INSTALLED_APPS:\n module_name = '{app_name}.glitter_apps'.format(app_name=app_name)\n try:\n glitter_apps_module = import_module(module_name)\n if hasattr(glitter_apps_module, 'apps'):\n self.glitter_apps.update(glitter_apps_module.apps)\n except ImportError:\n pass\n\n self.discovered = True"
] |
[
0.7137013673782349,
0.689215898513794,
0.6736341714859009,
0.661320686340332,
0.6490828394889832,
0.6484881639480591,
0.6462690234184265,
0.6458756327629089,
0.6435902118682861,
0.6427618265151978,
0.6419044733047485,
0.6411451101303101
] |
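For the process_request hook above to be invoked at all, the middleware class has to appear in Django's middleware settings. A hypothetical wiring sketch (the dotted path is invented, not taken from the project):

MIDDLEWARE = [
    # ... Django's default middleware entries ...
    'glitter.middleware.GlitterUrlConfMiddleware',  # hypothetical path to the class defining process_request above
]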
Execute all current and future payloads
Blocks and executes payloads until :py:meth:`stop` is called.
It is an error for any orphaned payload to return or raise.
|
def run(self):
"""
Execute all current and future payloads
Blocks and executes payloads until :py:meth:`stop` is called.
It is an error for any orphaned payload to return or raise.
"""
self._logger.info('runner started: %s', self)
try:
with self._lock:
assert not self.running.is_set() and self._stopped.is_set(), 'cannot re-run: %s' % self
self.running.set()
self._stopped.clear()
self._run()
except Exception:
self._logger.exception('runner aborted: %s', self)
raise
else:
self._logger.info('runner stopped: %s', self)
finally:
with self._lock:
self.running.clear()
self._stopped.set()
|
[
"async def _await_all(self):\n \"\"\"Async component of _run\"\"\"\n delay = 0.0\n # we run a top-level nursery that automatically reaps/cancels for us\n async with trio.open_nursery() as nursery:\n while self.running.is_set():\n await self._start_payloads(nursery=nursery)\n await trio.sleep(delay)\n delay = min(delay + 0.1, 1.0)\n # cancel the scope to cancel all payloads\n nursery.cancel_scope.cancel()",
"async def _run_payloads(self):\n \"\"\"Async component of _run\"\"\"\n delay = 0.0\n try:\n while self.running.is_set():\n await self._start_payloads()\n await self._reap_payloads()\n await asyncio.sleep(delay)\n delay = min(delay + 0.1, 1.0)\n except Exception:\n await self._cancel_payloads()\n raise",
"async def _start_payloads(self, nursery):\n \"\"\"Start all queued payloads\"\"\"\n with self._lock:\n for coroutine in self._payloads:\n nursery.start_soon(coroutine)\n self._payloads.clear()\n await trio.sleep(0)",
"async def _start_payloads(self):\n \"\"\"Start all queued payloads\"\"\"\n with self._lock:\n for coroutine in self._payloads:\n task = self.event_loop.create_task(coroutine())\n self._tasks.add(task)\n self._payloads.clear()\n await asyncio.sleep(0)",
"def run_payload(self, payload, *, flavour: ModuleType):\n \"\"\"Execute one payload after its runner is started and return its output\"\"\"\n return self.runners[flavour].run_payload(payload)",
"def _start_payloads(self):\n \"\"\"Start all queued payloads\"\"\"\n with self._lock:\n payloads = self._payloads.copy()\n self._payloads.clear()\n for subroutine in payloads:\n thread = CapturingThread(target=subroutine)\n thread.start()\n self._threads.add(thread)\n self._logger.debug('booted thread %s', thread)\n time.sleep(0)",
"async def _reap_payloads(self):\n \"\"\"Clean up all finished payloads\"\"\"\n for task in self._tasks.copy():\n if task.done():\n self._tasks.remove(task)\n if task.exception() is not None:\n raise task.exception()\n await asyncio.sleep(0)",
"def stop(self):\n \"\"\"Stop execution of all current and future payloads\"\"\"\n if not self.running.wait(0.2):\n return\n self._logger.debug('runner disabled: %s', self)\n with self._lock:\n self.running.clear()\n self._stopped.wait()",
"def execute(self, payload, *args, flavour: ModuleType, **kwargs):\n \"\"\"\n Synchronously run ``payload`` and provide its output\n\n If ``*args*`` and/or ``**kwargs`` are provided, pass them to ``payload`` upon execution.\n \"\"\"\n if args or kwargs:\n payload = functools.partial(payload, *args, **kwargs)\n return self._meta_runner.run_payload(payload, flavour=flavour)",
"def _reap_payloads(self):\n \"\"\"Clean up all finished payloads\"\"\"\n for thread in self._threads.copy():\n # CapturingThread.join will throw\n if thread.join(timeout=0):\n self._threads.remove(thread)\n self._logger.debug('reaped thread %s', thread)",
"def register_payload(self, *payloads, flavour: ModuleType):\n \"\"\"Queue one or more payload for execution after its runner is started\"\"\"\n for payload in payloads:\n self._logger.debug('registering payload %s (%s)', NameRepr(payload), NameRepr(flavour))\n self.runners[flavour].register_payload(payload)",
"async def _cancel_payloads(self):\n \"\"\"Cancel all remaining payloads\"\"\"\n for task in self._tasks:\n task.cancel()\n await asyncio.sleep(0)\n for task in self._tasks:\n while not task.done():\n await asyncio.sleep(0.1)\n task.cancel()"
] |
[
0.7552907466888428,
0.7415286302566528,
0.7319077849388123,
0.7161659598350525,
0.7152004837989807,
0.714405357837677,
0.7080373167991638,
0.7059256434440613,
0.7043203115463257,
0.6888946294784546,
0.6872673630714417,
0.6786566376686096
] |
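A self-contained miniature of the run()/stop() handshake used by the runner above (stop() appears in the next record): running gates the payload loop, and _stopped lets stop() block until run() has fully unwound. MiniRunner is invented for illustration and omits payload handling and logging:

import threading
import time

class MiniRunner:
    def __init__(self):
        self.running = threading.Event()
        self._stopped = threading.Event()
        self._stopped.set()  # a runner starts out stopped

    def run(self):
        self.running.set()
        self._stopped.clear()
        try:
            while self.running.is_set():
                time.sleep(0.05)  # stand-in for executing payloads
        finally:
            self.running.clear()
            self._stopped.set()

    def stop(self):
        if not self.running.wait(0.2):
            return  # never started, or already stopped
        self.running.clear()
        self._stopped.wait()  # returns once run() has exited

worker = MiniRunner()
thread = threading.Thread(target=worker.run)
thread.start()
worker.stop()
thread.join()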
Stop execution of all current and future payloads
|
def stop(self):
"""Stop execution of all current and future payloads"""
if not self.running.wait(0.2):
return
self._logger.debug('runner disabled: %s', self)
with self._lock:
self.running.clear()
self._stopped.wait()
|
[
"async def _cancel_payloads(self):\n \"\"\"Cancel all remaining payloads\"\"\"\n for task in self._tasks:\n task.cancel()\n await asyncio.sleep(0)\n for task in self._tasks:\n while not task.done():\n await asyncio.sleep(0.1)\n task.cancel()",
"def shutdown(self):\n \"\"\"Shutdown the accept loop and stop running payloads\"\"\"\n self._must_shutdown = True\n self._is_shutdown.wait()\n self._meta_runner.stop()",
"synchronized public void stop() {\n process = false;\n BlockEncodeRequest temp = new BlockEncodeRequest();\n temp.setAll(null, -1, -1, -1, -1, null);\n int count = frameThreadMap.size();\n for(int i = 0; i < count; i++) {\n unassignedEncodeRequests.add(temp);\n }\n }",
"def run(self):\n \"\"\"\n Execute all current and future payloads\n\n Blocks and executes payloads until :py:meth:`stop` is called.\n It is an error for any orphaned payload to return or raise.\n \"\"\"\n self._logger.info('runner started: %s', self)\n try:\n with self._lock:\n assert not self.running.is_set() and self._stopped.is_set(), 'cannot re-run: %s' % self\n self.running.set()\n self._stopped.clear()\n self._run()\n except Exception:\n self._logger.exception('runner aborted: %s', self)\n raise\n else:\n self._logger.info('runner stopped: %s', self)\n finally:\n with self._lock:\n self.running.clear()\n self._stopped.set()",
"protected function stopped()\n {\n $this->redis->zrem(Queue::redisKey($this->queue, 'running'), $this->payload);\n\n Stats::decr('running', 1);\n Stats::decr('running', 1, Queue::redisKey($this->queue, 'stats'));\n }",
"def stop_daemon(self, payload=None):\n \"\"\"Kill current processes and initiate daemon shutdown.\n\n The daemon will shut down after a last check on all killed processes.\n \"\"\"\n kill_signal = signals['9']\n self.process_handler.kill_all(kill_signal, True)\n self.running = False\n\n return {'message': 'Pueue daemon shutting down',\n 'status': 'success'}",
"protected function stopCallQueues()\n {\n $this->executeQueues = false;\n $this->inputs->stopExecution();\n $this->operations->stopExecution();\n $this->outputs->stopExecution();\n }",
"def stop(self) -> None:\n \"\"\"\n Stop the :class:`~lahja.endpoint.Endpoint` from receiving further events.\n \"\"\"\n if not self._running:\n return\n\n self._running = False\n self._receiving_queue.put_nowait((TRANSPARENT_EVENT, None))\n self._internal_queue.put_nowait((TRANSPARENT_EVENT, None))",
"def stop_execution(self):\n \"\"\"\n Triggers the stopping of the object.\n \"\"\"\n super(LoadDataset, self).stop_execution()\n self._loader = None\n self._iterator = None",
"async def _reap_payloads(self):\n \"\"\"Clean up all finished payloads\"\"\"\n for task in self._tasks.copy():\n if task.done():\n self._tasks.remove(task)\n if task.exception() is not None:\n raise task.exception()\n await asyncio.sleep(0)",
"async def _await_all(self):\n \"\"\"Async component of _run\"\"\"\n delay = 0.0\n # we run a top-level nursery that automatically reaps/cancels for us\n async with trio.open_nursery() as nursery:\n while self.running.is_set():\n await self._start_payloads(nursery=nursery)\n await trio.sleep(delay)\n delay = min(delay + 0.1, 1.0)\n # cancel the scope to cancel all payloads\n nursery.cancel_scope.cancel()",
"def _reap_payloads(self):\n \"\"\"Clean up all finished payloads\"\"\"\n for thread in self._threads.copy():\n # CapturingThread.join will throw\n if thread.join(timeout=0):\n self._threads.remove(thread)\n self._logger.debug('reaped thread %s', thread)"
] |
[
0.7745028138160706,
0.7574176788330078,
0.7350283861160278,
0.7333797216415405,
0.7153338193893433,
0.7145117521286011,
0.7136678099632263,
0.7098059058189392,
0.7093703746795654,
0.7081308364868164,
0.7078524231910706,
0.7044951319694519
] |
Delimit a string at word boundaries.
::
>>> import uqbar.strings
>>> list(uqbar.strings.delimit_words("i want to believe"))
['i', 'want', 'to', 'believe']
::
>>> list(uqbar.strings.delimit_words("S3Bucket"))
['S3', 'Bucket']
::
>>> list(uqbar.strings.delimit_words("Route53"))
['Route', '53']
|
def delimit_words(string: str) -> Generator[str, None, None]:
"""
Delimit a string at word boundaries.
::
>>> import uqbar.strings
>>> list(uqbar.strings.delimit_words("i want to believe"))
['i', 'want', 'to', 'believe']
::
>>> list(uqbar.strings.delimit_words("S3Bucket"))
['S3', 'Bucket']
::
>>> list(uqbar.strings.delimit_words("Route53"))
['Route', '53']
"""
# TODO: Reimplement this
wordlike_characters = ("<", ">", "!")
current_word = ""
for i, character in enumerate(string):
if (
not character.isalpha()
and not character.isdigit()
and character not in wordlike_characters
):
if current_word:
yield current_word
current_word = ""
elif not current_word:
current_word += character
elif character.isupper():
if current_word[-1].isupper():
current_word += character
else:
yield current_word
current_word = character
elif character.islower():
if current_word[-1].isalpha():
current_word += character
else:
yield current_word
current_word = character
elif character.isdigit():
if current_word[-1].isdigit() or current_word[-1].isupper():
current_word += character
else:
yield current_word
current_word = character
elif character in wordlike_characters:
if current_word[-1] in wordlike_characters:
current_word += character
else:
yield current_word
current_word = character
if current_word:
yield current_word
|
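A small usage sketch built on delimit_words above; to_snake_case is an invented helper, not part of the original module:

def to_snake_case(string: str) -> str:
    return "_".join(word.lower() for word in delimit_words(string))

assert to_snake_case("S3Bucket") == "s3_bucket"
assert to_snake_case("Route53") == "route_53"
assert to_snake_case("i want to believe") == "i_want_to_believe"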
[
"def delimit_words(self):\n \"\"\"This method takes the existing encoded binary string\n and returns a binary string that will pad it such that\n the encoded string contains only full bytes.\n \"\"\"\n bits_short = 8 - (len(self.buffer.getvalue()) % 8)\n \n #The string already falls on an byte boundary do nothing\n if bits_short == 0 or bits_short == 8:\n return None\n else:\n return self.binary_string(0, bits_short)",
"def boundary_words(seq):\n '''\n Wraps all word transitions with a boundary token character (\\x00).\n\n Arguments:\n - `seq`:\n '''\n in_word = None\n for char in seq:\n if char == '\\x00' and in_word is not None:\n in_word = not in_word\n elif char in WHITESPACE_CHARS:\n if in_word is not None and in_word:\n yield '\\x00'\n in_word = False\n else:\n if in_word is not None and not in_word:\n yield '\\x00'\n in_word = True\n yield char",
"def _separate_words(string):\n \"\"\"\n Segment string on separator into list of words.\n\n Arguments:\n string -- the string we want to process\n\n Returns:\n words -- list of words the string got minced to\n separator -- the separator char intersecting words\n was_upper -- whether string happened to be upper-case\n \"\"\"\n words = []\n separator = \"\"\n\n # Index of current character. Initially 1 because we don't want to check\n # if the 0th character is a boundary.\n i = 1\n # Index of first character in a sequence\n s = 0\n # Previous character.\n p = string[0:1]\n\n # Treat an all-caps stringiable as lower-case, so that every letter isn't\n # counted as a boundary.\n was_upper = False\n if string.isupper():\n string = string.lower()\n was_upper = True\n\n # Iterate over each character, checking for boundaries, or places where\n # the stringiable should divided.\n while i <= len(string):\n c = string[i:i + 1]\n\n split = False\n if i < len(string):\n # Detect upper-case letter as boundary.\n if UPPER.match(c):\n split = True\n # Detect transition from separator to not separator.\n elif NOTSEP.match(c) and SEP.match(p):\n split = True\n # Detect transition not separator to separator.\n elif SEP.match(c) and NOTSEP.match(p):\n split = True\n else:\n # The loop goes one extra iteration so that it can handle the\n # remaining text after the last boundary.\n split = True\n\n if split:\n if NOTSEP.match(p):\n words.append(string[s:i])\n else:\n # stringiable contains at least one separator.\n # Use the first one as the stringiable's primary separator.\n if not separator:\n separator = string[s:s + 1]\n\n # Use None to indicate a separator in the word list.\n words.append(None)\n # If separators weren't included in the list, then breaks\n # between upper-case sequences (\"AAA_BBB\") would be\n # disregarded; the letter-run detector would count them as one\n # sequence (\"AAABBB\").\n s = i\n\n i += 1\n p = c\n\n return words, separator, was_upper",
"def _bpe_to_words(sentence, delimiter='@@'):\n \"\"\"Convert a sequence of bpe words into sentence.\"\"\"\n words = []\n word = ''\n delimiter_len = len(delimiter)\n for subwords in sentence:\n if len(subwords) >= delimiter_len and subwords[-delimiter_len:] == delimiter:\n word += subwords[:-delimiter_len]\n else:\n word += subwords\n words.append(word)\n word = ''\n return words",
"def boundary_transform(seq, force_edges = True):\n '''\n Wraps all word transitions with a boundary token character (\\x00).\n If desired (with ``force_edges`` set to ``True``), this inserts\n the boundary character at the beginning and end of the string.\n\n Arguments:\n - `seq`:\n - `force_edges = True`:\n '''\n gen = boundary_words(seq)\n if force_edges:\n gen = boundary_edges(gen)\n gen = remove_duplicates(gen)\n for char in gen:\n yield char",
"def word_slice(ctx, text, start, stop=0, by_spaces=False):\n \"\"\"\n Extracts a substring spanning from start up to but not-including stop\n \"\"\"\n text = conversions.to_string(text, ctx)\n start = conversions.to_integer(start, ctx)\n stop = conversions.to_integer(stop, ctx)\n by_spaces = conversions.to_boolean(by_spaces, ctx)\n\n if start == 0:\n raise ValueError(\"Start word cannot be zero\")\n elif start > 0:\n start -= 1 # convert to a zero-based offset\n\n if stop == 0: # zero is treated as no end\n stop = None\n elif stop > 0:\n stop -= 1 # convert to a zero-based offset\n\n words = __get_words(text, by_spaces)\n\n selection = operator.getitem(words, slice(start, stop))\n\n # re-combine selected words with a single space\n return ' '.join(selection)",
"def split(s, by=None, maxsplit=None):\n \"\"\"Split a string based on given delimiter(s).\n Delimiters can be either strings or compiled regular expression objects.\n\n :param s: String to split\n :param by: A delimiter, or iterable thereof.\n :param maxsplit: Maximum number of splits to perform.\n ``None`` means no limit,\n while 0 does not perform a split at all.\n\n :return: List of words in the string ``s``\n that were separated by delimiter(s)\n\n :raise ValueError: If the separator is an empty string or regex\n \"\"\"\n ensure_string(s)\n\n # TODO(xion): Consider introducing a case for ``split('')``\n # to make it return ``['']`` rather than default ``[]`` thru ``str.split``.\n # It's the so-called \"whitespace split\" that normally eliminates\n # empty strings from result. However, ``split(s)`` for any other ``s``\n # always returns ``[s]`` so these two approaches are at odds here.\n # (Possibly refer to split functions in other languages for comparison).\n\n # string delimiter are handled by appropriate standard function\n if by is None or is_string(by):\n return s.split(by) if maxsplit is None else s.split(by, maxsplit)\n\n # regex delimiters have certain special cases handled explicitly below,\n # so that we do the same things that ``str.split`` does\n if is_regex(by):\n if not by.pattern:\n return s.split('') # will fail with proper exception & message\n if maxsplit == 0:\n return [s]\n return by.split(s, maxsplit=maxsplit or 0)\n\n # multiple delimiters are handled by regex that matches them all\n if is_iterable(by):\n if not by:\n raise ValueError(\"empty separator list\")\n by = list(imap(ensure_string, by))\n if not s:\n return [''] # quickly eliminate trivial case\n or_ = s.__class__('|')\n regex = join(or_, imap(re.escape, by))\n return split(s, by=re.compile(regex), maxsplit=maxsplit)\n\n raise TypeError(\"invalid separator\")",
"def split_camel(word: str) -> str:\n \"\"\"\n Separate any words joined in Camel case fashion using a single space.\n >>> split_camel('esseCarthaginienses')\n 'esse Carthaginienses'\n >>> split_camel('urbemCertimam')\n 'urbem Certimam'\n \"\"\"\n m = re.match('[a-z]+[A-Z][a-z]', word)\n if m:\n _, end = m.span()\n return word[:end - 2] + ' ' + word[end - 2:]\n return word",
"def delimit(x: str, delims: Tuple[str, str]) -> str:\n \"\"\"Delimits x, using delims[0] (left) and delims[1] (right).\"\"\"\n return delims[0] + x + delims[1]",
"def _subspan(self, s, span, nextspan):\n \"\"\"Recursively subdivide spans based on a series of rules.\"\"\"\n\n # Split on boundaries between greek and non-greek\n text = s[span[0]:span[1]]\n for i, char in enumerate(text):\n if i < len(text) - 1:\n nextchar = text[i + 1]\n if (char in GREEK and nextchar not in GREEK) or (char not in GREEK and nextchar in GREEK):\n return [(span[0], span[0] + i + 1), (span[0] + i + 1, span[1])]\n\n # Perform all normal WordTokenizer splits\n return super(FineWordTokenizer, self)._subspan(s,span, nextspan)",
"def remove_stopwords(self, tokens):\n \"\"\"Remove all stopwords from a list of word tokens or a string of text.\"\"\"\n if isinstance(tokens, (list, tuple)):\n return [word for word in tokens if word.lower() not in self._stopwords]\n else:\n return ' '.join(\n [word for word in tokens.split(' ') if word.lower() not in self._stopwords]\n )",
"def delimit_slug(slug, sep=' '):\n \"\"\" Return a str of separated tokens found within a slugLike_This => 'slug Like This'\n\n >>> delimit_slug(\"slugLike_ThisW/aTLA's\")\n 'slug Like This W a TLA s'\n >>> delimit_slug('slugLike_ThisW/aTLA', '|')\n 'slug|Like|This|W|a|TLA'\n \"\"\"\n hyphenated_slug = re.sub(CRE_SLUG_DELIMITTER, sep, slug)\n return hyphenated_slug"
] |
[
0.6909090280532837,
0.6606740355491638,
0.6514214873313904,
0.6489845514297485,
0.6448297500610352,
0.6416976451873779,
0.6416348814964294,
0.6394431591033936,
0.6336376070976257,
0.6314685940742493,
0.6312682032585144,
0.6307927370071411
] |
Normalizes whitespace.
Strips leading and trailing blank lines, dedents, and removes trailing
whitespace from the result.
|
def normalize(string: str) -> str:
"""
Normalizes whitespace.
Strips leading and trailing blank lines, dedents, and removes trailing
whitespace from the result.
"""
string = string.replace("\t", " ")
lines = string.split("\n")
while lines and (not lines[0] or lines[0].isspace()):
lines.pop(0)
while lines and (not lines[-1] or lines[-1].isspace()):
lines.pop()
for i, line in enumerate(lines):
lines[i] = line.rstrip()
string = "\n".join(lines)
string = textwrap.dedent(string)
return string
|
[
"def normalize(text_block)\n return text_block if @options[:preserve_whitespace]\n\n # Strip out any preceding [whitespace]* that occur on every line. Not\n # the smartest, but I wonder if I care.\n text_block = text_block.gsub(/^(\\s*\\*+)/, '')\n\n # Strip consistent indenting by measuring first line's whitespace\n indent_size = nil\n unindented = text_block.split(\"\\n\").collect do |line|\n preceding_whitespace = line.scan(/^\\s*/)[0].to_s.size\n indent_size = preceding_whitespace if indent_size.nil?\n if line == \"\"\n \"\"\n elsif indent_size <= preceding_whitespace && indent_size > 0\n line.slice(indent_size, line.length - 1)\n else\n line\n end\n end.join(\"\\n\")\n\n unindented.strip\n end",
"def _dedentlines(lines, tabsize=8, skip_first_line=False):\n \"\"\"_dedentlines(lines, tabsize=8, skip_first_line=False) -> dedented lines\n\n \"lines\" is a list of lines to dedent.\n \"tabsize\" is the tab width to use for indent width calculations.\n \"skip_first_line\" is a boolean indicating if the first line should\n be skipped for calculating the indent width and for dedenting.\n This is sometimes useful for docstrings and similar.\n\n Same as dedent() except operates on a sequence of lines. Note: the\n lines list is modified **in-place**.\n \"\"\"\n DEBUG = False\n if DEBUG:\n print(\"dedent: dedent(..., tabsize=%d, skip_first_line=%r)\"\\\n % (tabsize, skip_first_line))\n indents = []\n margin = None\n for i, line in enumerate(lines):\n if i == 0 and skip_first_line: continue\n indent = 0\n for ch in line:\n if ch == ' ':\n indent += 1\n elif ch == '\\t':\n indent += tabsize - (indent % tabsize)\n elif ch in '\\r\\n':\n continue # skip all-whitespace lines\n else:\n break\n else:\n continue # skip all-whitespace lines\n if DEBUG: print(\"dedent: indent=%d: %r\" % (indent, line))\n if margin is None:\n margin = indent\n else:\n margin = min(margin, indent)\n if DEBUG: print(\"dedent: margin=%r\" % margin)\n\n if margin is not None and margin > 0:\n for i, line in enumerate(lines):\n if i == 0 and skip_first_line: continue\n removed = 0\n for j, ch in enumerate(line):\n if ch == ' ':\n removed += 1\n elif ch == '\\t':\n removed += tabsize - (removed % tabsize)\n elif ch in '\\r\\n':\n if DEBUG: print(\"dedent: %r: EOL -> strip up to EOL\" % line)\n lines[i] = lines[i][j:]\n break\n else:\n raise ValueError(\"unexpected non-whitespace char %r in \"\n \"line %r while removing %d-space margin\"\n % (ch, line, margin))\n if DEBUG:\n print(\"dedent: %r: %r -> removed %d/%d\"\\\n % (line, ch, removed, margin))\n if removed == margin:\n lines[i] = lines[i][j+1:]\n break\n elif removed > margin:\n lines[i] = ' '*(removed-margin) + lines[i][j+1:]\n break\n else:\n if removed:\n lines[i] = lines[i][removed:]\n return lines",
"def text_normalize(text):\n \"\"\"\n Whitespace normalization:\n\n - Strip empty lines\n - Strip leading whitespace in a line\n - Strip trailing whitespace in a line\n - Normalize line endings\n \"\"\"\n # if not isinstance(text, str):\n if isinstance(text, bytes):\n # -- MAYBE: command.ouput => bytes, encoded stream output.\n text = codecs.decode(text)\n lines = [ line.strip() for line in text.splitlines() if line.strip() ]\n return \"\\n\".join(lines)",
"public static String cleanWhitespace(String theResult) {\n\t\tStringBuilder b = new StringBuilder();\n\t\tboolean inWhitespace = false;\n\t\tboolean betweenTags = false;\n\t\tboolean lastNonWhitespaceCharWasTagEnd = false;\n\t\tboolean inPre = false;\n\t\tfor (int i = 0; i < theResult.length(); i++) {\n\t\t\tchar nextChar = theResult.charAt(i);\n\t\t\tif (inPre) {\n\t\t\t\tb.append(nextChar);\n\t\t\t\tcontinue;\n\t\t\t} else if (nextChar == '>') {\n\t\t\t\tb.append(nextChar);\n\t\t\t\tbetweenTags = true;\n\t\t\t\tlastNonWhitespaceCharWasTagEnd = true;\n\t\t\t\tcontinue;\n\t\t\t} else if (nextChar == '\\n' || nextChar == '\\r') {\n\t\t\t\tcontinue;\n\t\t\t}\n\n\t\t\tif (betweenTags) {\n\t\t\t\tif (Character.isWhitespace(nextChar)) {\n\t\t\t\t\tinWhitespace = true;\n\t\t\t\t} else if (nextChar == '<') {\n\t\t\t\t\tif (inWhitespace && !lastNonWhitespaceCharWasTagEnd) {\n\t\t\t\t\t\tb.append(' ');\n\t\t\t\t\t}\n\t\t\t\t\tb.append(nextChar);\n\t\t\t\t\tinWhitespace = false;\n\t\t\t\t\tbetweenTags = false;\n\t\t\t\t\tlastNonWhitespaceCharWasTagEnd = false;\n\t\t\t\t\tif (i + 3 < theResult.length()) {\n\t\t\t\t\t\tchar char1 = Character.toLowerCase(theResult.charAt(i + 1));\n\t\t\t\t\t\tchar char2 = Character.toLowerCase(theResult.charAt(i + 2));\n\t\t\t\t\t\tchar char3 = Character.toLowerCase(theResult.charAt(i + 3));\n\t\t\t\t\t\tchar char4 = Character.toLowerCase((i + 4 < theResult.length()) ? theResult.charAt(i + 4) : ' ');\n\t\t\t\t\t\tif (char1 == 'p' && char2 == 'r' && char3 == 'e') {\n\t\t\t\t\t\t\tinPre = true;\n\t\t\t\t\t\t} else if (char1 == '/' && char2 == 'p' && char3 == 'r' && char4 == 'e') {\n\t\t\t\t\t\t\tinPre = false;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t} else {\n\t\t\t\t\tlastNonWhitespaceCharWasTagEnd = false;\n\t\t\t\t\tif (inWhitespace) {\n\t\t\t\t\t\tb.append(' ');\n\t\t\t\t\t\tinWhitespace = false;\n\t\t\t\t\t}\n\t\t\t\t\tb.append(nextChar);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tb.append(nextChar);\n\t\t\t}\n\t\t}\n\t\treturn b.toString();\n\t}",
"def clean_text(self, text, preserve_space):\n \"\"\"Text cleaning as per https://www.w3.org/TR/SVG/text.html#WhiteSpace\n \"\"\"\n if text is None:\n return\n if preserve_space:\n text = text.replace('\\r\\n', ' ').replace('\\n', ' ').replace('\\t', ' ')\n else:\n text = text.replace('\\r\\n', '').replace('\\n', '').replace('\\t', ' ')\n text = text.strip()\n while (' ' in text):\n text = text.replace(' ', ' ')\n return text",
"def _dedent(text):\n \"\"\"Remove common indentation from each line in a text block.\n\n When text block is a single line, return text block. Otherwise\n determine common indentation from last line, strip common\n indentation from each line, and return text block consisting of\n inner lines (don't include first and last lines since they either\n empty or contain whitespace and are present in baselined\n string to make them pretty and delineate the common indentation).\n\n :param str text: text block\n :returns: text block with common indentation removed\n :rtype: str\n :raises ValueError: when text block violates whitespace rules\n\n \"\"\"\n lines = text.split('\\n')\n\n if len(lines) == 1:\n indent = 0\n\n elif lines[0].strip():\n raise ValueError('when multiple lines, first line must be blank')\n\n elif lines[-1].strip():\n raise ValueError('last line must only contain indent whitespace')\n\n else:\n indent = len(lines[-1])\n\n if any(line[:indent].strip() for line in lines):\n raise ValueError(\n 'indents must equal or exceed indent in last line')\n\n lines = [line[indent:] for line in lines][1:-1]\n\n return indent, '\\n'.join(lines)",
"def _clean_code(self, code):\n \"\"\" Return *code* with indentation and leading/trailing blank lines\n removed. \n \"\"\"\n lines = code.split(\"\\n\")\n min_indent = 100\n for line in lines:\n if line.strip() != \"\":\n indent = len(line) - len(line.lstrip())\n min_indent = min(indent, min_indent)\n if min_indent > 0:\n lines = [line[min_indent:] for line in lines]\n code = \"\\n\".join(lines)\n return code",
"def _delete_whitespace(self):\n \"\"\"Delete all whitespace from the end of the line.\"\"\"\n while isinstance(self._lines[-1], (self._Space, self._LineBreak,\n self._Indent)):\n del self._lines[-1]",
"def strip_leading_comments(text):\n \"\"\"Strips the leading whitespaces and % from the given text.\n\n Adapted from textwrap.dedent\n \"\"\"\n # Look for the longest leading string of spaces and tabs common to\n # all lines.\n margin = None\n text = _whitespace_only_re.sub('', text)\n indents = _leading_whitespace_re.findall(text)\n for indent in indents:\n if margin is None:\n margin = indent\n\n # Current line more deeply indented than previous winner:\n # no change (previous winner is still on top).\n elif indent.startswith(margin):\n pass\n\n # Current line consistent with and no deeper than previous winner:\n # it's the new winner.\n elif margin.startswith(indent):\n margin = indent\n\n # Current line and previous winner have no common whitespace:\n # there is no margin.\n else:\n margin = \"\"\n break\n\n # sanity check (testing/debugging only)\n if 0 and margin:\n for line in text.split(\"\\n\"):\n assert not line or line.startswith(margin), \\\n \"line = %r, margin = %r\" % (line, margin)\n\n if margin:\n text = re.sub(r'(?m)^' + margin, '', text)\n return text",
"def clean_whitespace(statement):\n \"\"\"\n Remove any consecutive whitespace characters from the statement text.\n \"\"\"\n import re\n\n # Replace linebreaks and tabs with spaces\n statement.text = statement.text.replace('\\n', ' ').replace('\\r', ' ').replace('\\t', ' ')\n\n # Remove any leeding or trailing whitespace\n statement.text = statement.text.strip()\n\n # Remove consecutive spaces\n statement.text = re.sub(' +', ' ', statement.text)\n\n return statement",
"function normalizeText(text, start, end, removeTrailingSemiColons) {\n var trimmedText;\n\n // Remove leading spaces\n trimmedText = _.trimLeft(text);\n\n if (trimmedText.length < text.length) {\n start += (text.length - trimmedText.length);\n }\n\n text = trimmedText;\n\n // Remove trailing spaces\n trimmedText = _.trimRight(text);\n\n if (trimmedText.length < text.length) {\n end -= (text.length - trimmedText.length);\n }\n\n text = trimmedText;\n\n // Remove trailing semicolons\n if (removeTrailingSemiColons) {\n trimmedText = _.trimRight(text, ';');\n\n if (trimmedText.length < text.length) {\n end -= (text.length - trimmedText.length);\n }\n }\n\n return {\n text: trimmedText,\n start: start,\n end: end\n };\n }",
"def tlg_plaintext_cleanup(text, rm_punctuation=False, rm_periods=False):\n \"\"\"Remove and substitute post-processing for Greek TLG text.\n TODO: Surely more junk to pull out. Please submit bugs!\n TODO: {.+?}|\\(.+?\\) working?\n TODO: This is a rather slow now, help in speeding up welcome.\n \"\"\"\n remove_comp = regex.compile(r'-\\n|«|»|<|>|\\.\\.\\.|‘|’|_|{.+?}|\\(.+?\\)|[a-zA-Z0-9]', flags=regex.VERSION1)\n text = remove_comp.sub('', text)\n\n new_text = None\n if rm_punctuation:\n new_text = ''\n punctuation = [',', '·', ':', '\"', \"'\", '?', '-', '!', '*', '[', ']', '{', '}']\n if rm_periods:\n punctuation += ['.', ';']\n for char in text:\n # second try at rming some punctuation; merge with above regex\n if char in punctuation:\n pass\n else:\n new_text += char\n if new_text:\n text = new_text\n\n # replace line breaks w/ space\n replace_comp = regex.compile(r'\\n')\n text = replace_comp.sub(' ', text)\n\n comp_space = regex.compile(r'\\s+')\n text = comp_space.sub(' ', text)\n\n return text"
] |
[
0.7407689094543457,
0.7230909466743469,
0.7103708386421204,
0.7006025910377502,
0.6976730823516846,
0.6974694728851318,
0.6950390934944153,
0.6949636936187744,
0.6920515298843384,
0.6885442733764648,
0.6881459355354309,
0.6814990043640137
] |
Convert a string to dash-delimited words.
::
>>> import uqbar.strings
>>> string = 'Tô Đặc Biệt Xe Lửa'
>>> print(uqbar.strings.to_dash_case(string))
to-dac-biet-xe-lua
::
>>> string = 'alpha.beta.gamma'
>>> print(uqbar.strings.to_dash_case(string))
alpha-beta-gamma
|
def to_dash_case(string: str) -> str:
"""
Convert a string to dash-delimited words.
::
>>> import uqbar.strings
>>> string = 'Tô Đặc Biệt Xe Lửa'
>>> print(uqbar.strings.to_dash_case(string))
to-dac-biet-xe-lua
::
>>> string = 'alpha.beta.gamma'
>>> print(uqbar.strings.to_dash_case(string))
alpha-beta-gamma
"""
string = unidecode.unidecode(string)
words = (_.lower() for _ in delimit_words(string))
string = "-".join(words)
return string
|
[
"def parse_dash(string, width):\n\t\"parse dash pattern specified with string\"\n\t\n\t# DashConvert from {tk-sources}/generic/tkCanvUtil.c\n\tw = max(1, int(width + 0.5))\n\n\tn = len(string)\n\tresult = []\n\tfor i, c in enumerate(string):\n\t\tif c == \" \" and len(result):\n\t\t\tresult[-1] += w + 1\n\t\telif c == \"_\":\n\t\t\tresult.append(8*w)\n\t\t\tresult.append(4*w)\n\t\telif c == \"-\":\n\t\t\tresult.append(6*w)\n\t\t\tresult.append(4*w)\n\t\telif c == \",\":\n\t\t\tresult.append(4*w)\n\t\t\tresult.append(4*w)\n\t\telif c == \".\":\n\t\t\tresult.append(2*w)\n\t\t\tresult.append(4*w)\n\treturn result",
"def dashrepl(value):\n \"\"\"\n Replace any non-word characters with a dash.\n \"\"\"\n patt = re.compile(r'\\W', re.UNICODE)\n return re.sub(patt, '-', value)",
"def classify(dashed_word)\n dashed_word.to_s.split('-').each { |part| part[0] = part[0].chr.upcase }.join\n end",
"function dashCase(str) {\n var newStr = '';\n\n for (var i = 0, len = str.length; i < len; i++) {\n if (str[i] === str[i].toUpperCase()) { newStr += '-'; }\n newStr += str[i].toLowerCase();\n }\n\n return newStr;\n }",
"def kebab_case(string):\n \"\"\"\n Converts a string to kebab case. For example::\n\n kebab_case('one_two_three') -> 'one-two-three'\n\n NOTE: To generate valid slugs, use :meth:`slugify`\n \"\"\"\n if not string:\n return string\n string = string.replace('_', '-').replace(' ', '-')\n return de_camel(string, '-')",
"def camelCaseToDashName(camelCase):\n '''\n camelCaseToDashName - Convert a camel case name to a dash-name (like paddingTop to padding-top)\n\n @param camelCase <str> - A camel-case string\n\n @return <str> - A dash-name\n '''\n\n camelCaseList = list(camelCase)\n\n ret = []\n\n for ch in camelCaseList:\n if ch.isupper():\n ret.append('-')\n ret.append(ch.lower())\n else:\n ret.append(ch)\n\n return ''.join(ret)",
"def dasherize(word)\n classify(word).to_s.gsub(/::/, '/').\n gsub(/([A-Z]+)([A-Z][a-z])/,'\\1_\\2').\n gsub(/([a-z\\d])([A-Z])/,'\\1_\\2').\n tr(\"_\", \"-\").downcase\n end",
"def delimit_words(string: str) -> Generator[str, None, None]:\n \"\"\"\n Delimit a string at word boundaries.\n\n ::\n\n >>> import uqbar.strings\n >>> list(uqbar.strings.delimit_words(\"i want to believe\"))\n ['i', 'want', 'to', 'believe']\n\n ::\n\n >>> list(uqbar.strings.delimit_words(\"S3Bucket\"))\n ['S3', 'Bucket']\n\n ::\n\n >>> list(uqbar.strings.delimit_words(\"Route53\"))\n ['Route', '53']\n\n \"\"\"\n # TODO: Reimplement this\n wordlike_characters = (\"<\", \">\", \"!\")\n current_word = \"\"\n for i, character in enumerate(string):\n if (\n not character.isalpha()\n and not character.isdigit()\n and character not in wordlike_characters\n ):\n if current_word:\n yield current_word\n current_word = \"\"\n elif not current_word:\n current_word += character\n elif character.isupper():\n if current_word[-1].isupper():\n current_word += character\n else:\n yield current_word\n current_word = character\n elif character.islower():\n if current_word[-1].isalpha():\n current_word += character\n else:\n yield current_word\n current_word = character\n elif character.isdigit():\n if current_word[-1].isdigit() or current_word[-1].isupper():\n current_word += character\n else:\n yield current_word\n current_word = character\n elif character in wordlike_characters:\n if current_word[-1] in wordlike_characters:\n current_word += character\n else:\n yield current_word\n current_word = character\n if current_word:\n yield current_word",
"function dashProperCase(str) {\n if ( !str.length ) {\n return str;\n }\n return str.split('-').map(function (piece) {\n if ( piece.length ) {\n return piece.substring(0, 1).toUpperCase() + piece.substring(1);\n }\n return piece;\n }).join('-');\n }",
"def camel_cased_to_dasherized(camel_cased_word)\n word = camel_cased_word.to_s.dup\n word.gsub!(/([A-Z\\d]+)([A-Z][a-z])/,'\\1-\\2')\n word.gsub!(/([a-z\\d])([A-Z])/,'\\1-\\2')\n word.downcase!\n word\n end",
"def title_case(string):\n \"\"\"\n Converts a string to title case. For example::\n\n title_case('one_two_three') -> 'One Two Three'\n \"\"\"\n if not string:\n return string\n string = string.replace('_', ' ').replace('-', ' ')\n parts = de_camel(string, ' ', _lowercase=False).strip().split(' ')\n return ' '.join([part if part.isupper() else part.title()\n for part in parts])",
"def convert(self, string):\n \"\"\"Return a copy of string converted to case.\n\n Parameters\n ----------\n string : `str`\n\n Returns\n -------\n `str`\n\n Examples\n --------\n >>> CharCase.LOWER.convert('sTr InG')\n 'str ing'\n >>> CharCase.UPPER.convert('sTr InG')\n 'STR ING'\n >>> CharCase.TITLE.convert('sTr InG')\n 'Str ing'\n >>> CharCase.PRESERVE.convert('sTr InG')\n 'sTr InG'\n \"\"\"\n if self == self.__class__.TITLE:\n return capitalize(string)\n if self == self.__class__.UPPER:\n return string.upper()\n if self == self.__class__.LOWER:\n return string.lower()\n return string"
] |
[
0.7056986689567566,
0.7022408246994019,
0.6875450015068054,
0.6869301795959473,
0.6775718927383423,
0.6666778922080994,
0.6664897799491882,
0.6663722395896912,
0.6655914783477783,
0.662871778011322,
0.661432147026062,
0.6612073183059692
] |
returns a list of all fixers found in the lib2to3 library
|
def get_lib2to3_fixers():
'''returns a list of all fixers found in the lib2to3 library'''
fixers = []
fixer_dirname = fixer_dir.__path__[0]
for name in sorted(os.listdir(fixer_dirname)):
if name.startswith("fix_") and name.endswith(".py"):
fixers.append("lib2to3.fixes." + name[:-3])
return fixers
|
[
"def get_single_fixer(fixname):\n '''return a single fixer found in the lib2to3 library'''\n fixer_dirname = fixer_dir.__path__[0]\n for name in sorted(os.listdir(fixer_dirname)):\n if (name.startswith(\"fix_\") and name.endswith(\".py\") \n and fixname == name[4:-3]):\n return \"lib2to3.fixes.\" + name[:-3]",
"def fix_2to3(source,\n aggressive=True, select=None, ignore=None, filename='',\n where='global', verbose=False):\n \"\"\"Fix various deprecated code (via lib2to3).\"\"\"\n if not aggressive:\n return source\n\n select = select or []\n ignore = ignore or []\n\n return refactor(source,\n code_to_2to3(select=select,\n ignore=ignore,\n where=where,\n verbose=verbose),\n filename=filename)",
"def fix_2to3(source,\n aggressive=True, select=None, ignore=None, filename=''):\n \"\"\"Fix various deprecated code (via lib2to3).\"\"\"\n if not aggressive:\n return source\n\n select = select or []\n ignore = ignore or []\n\n return refactor(source,\n code_to_2to3(select=select,\n ignore=ignore),\n filename=filename)",
"def refactor_with_2to3(source_text, fixer_names, filename=''):\n \"\"\"Use lib2to3 to refactor the source.\n\n Return the refactored source code.\n\n \"\"\"\n from lib2to3.refactor import RefactoringTool\n fixers = ['lib2to3.fixes.fix_' + name for name in fixer_names]\n tool = RefactoringTool(fixer_names=fixers, explicit=fixers)\n\n from lib2to3.pgen2 import tokenize as lib2to3_tokenize\n try:\n # The name parameter is necessary particularly for the \"import\" fixer.\n return unicode(tool.refactor_string(source_text, name=filename))\n except lib2to3_tokenize.TokenError:\n return source_text",
"def get_fixers(self):\n \"\"\"Inspects the options to load the requested patterns and handlers.\n\n Returns:\n (pre_order, post_order), where pre_order is the list of fixers that\n want a pre-order AST traversal, and post_order is the list that want\n post-order traversal.\n \"\"\"\n pre_order_fixers = []\n post_order_fixers = []\n for fix_mod_path in self.fixers:\n mod = __import__(fix_mod_path, {}, {}, [\"*\"])\n fix_name = fix_mod_path.rsplit(\".\", 1)[-1]\n if fix_name.startswith(self.FILE_PREFIX):\n fix_name = fix_name[len(self.FILE_PREFIX):]\n parts = fix_name.split(\"_\")\n class_name = self.CLASS_PREFIX + \"\".join([p.title() for p in parts])\n try:\n fix_class = getattr(mod, class_name)\n except AttributeError:\n raise FixerError(\"Can't find %s.%s\" % (fix_name, class_name))\n fixer = fix_class(self.options, self.fixer_log)\n if fixer.explicit and self.explicit is not True and \\\n fix_mod_path not in self.explicit:\n self.log_message(\"Skipping implicit fixer: %s\", fix_name)\n continue\n\n self.log_debug(\"Adding transformation: %s\", fix_name)\n if fixer.order == \"pre\":\n pre_order_fixers.append(fixer)\n elif fixer.order == \"post\":\n post_order_fixers.append(fixer)\n else:\n raise FixerError(\"Illegal fixer order: %r\" % fixer.order)\n\n key_func = operator.attrgetter(\"run_order\")\n pre_order_fixers.sort(key=key_func)\n post_order_fixers.sort(key=key_func)\n return (pre_order_fixers, post_order_fixers)",
"def get_all_fix_names(fixer_pkg, remove_prefix=True):\n \"\"\"Return a sorted list of all available fix names in the given package.\"\"\"\n pkg = __import__(fixer_pkg, [], [], [\"*\"])\n fixer_dir = os.path.dirname(pkg.__file__)\n fix_names = []\n for name in sorted(os.listdir(fixer_dir)):\n if name.startswith(\"fix_\") and name.endswith(\".py\"):\n if remove_prefix:\n name = name[4:]\n fix_names.append(name[:-3])\n return fix_names",
"def command_py2to3(args):\n \"\"\"\n Apply '2to3' tool (Python2 to Python3 conversion tool) to Python sources.\n \"\"\"\n from lib2to3.main import main\n sys.exit(main(\"lib2to3.fixes\", args=args.sources))",
"def _get_headnode_dict(fixer_list):\n \"\"\" Accepts a list of fixers and returns a dictionary\n of head node type --> fixer list. \"\"\"\n head_nodes = collections.defaultdict(list)\n every = []\n for fixer in fixer_list:\n if fixer.pattern:\n try:\n heads = _get_head_types(fixer.pattern)\n except _EveryNode:\n every.append(fixer)\n else:\n for node_type in heads:\n head_nodes[node_type].append(fixer)\n else:\n if fixer._accept_type is not None:\n head_nodes[fixer._accept_type].append(fixer)\n else:\n every.append(fixer)\n for node_type in chain(pygram.python_grammar.symbol2number.itervalues(),\n pygram.python_grammar.tokens):\n head_nodes[node_type].extend(every)\n return dict(head_nodes)",
"def patch_lib_paths(fname, library_dirs):\n \"\"\"Load any weakly-defined libraries from their real location\n \n (only on OS X)\n \n - Find libraries with `otool -L`\n - Update with `install_name_tool -change`\n \"\"\"\n if sys.platform != 'darwin':\n return\n \n libs = _get_libs(fname)\n for lib in libs:\n if not lib.startswith(('@', '/')):\n real_lib = _find_library(lib, library_dirs)\n if real_lib:\n _install_name_change(fname, lib, real_lib)",
"def getThirdPartyLibDefinitions(self, libs):\n\t\t\"\"\"\n\t\tRetrieves the list of preprocessor definitions for building against the Unreal-bundled versions of the specified third-party libraries\n\t\t\"\"\"\n\t\tplatformDefaults = True\n\t\tif libs[0] == '--nodefaults':\n\t\t\tplatformDefaults = False\n\t\t\tlibs = libs[1:]\n\t\t\n\t\tdetails = self.getThirdpartyLibs(libs, includePlatformDefaults=platformDefaults)\n\t\treturn details.getPreprocessorDefinitions(self.getEngineRoot(), delimiter='\\n')",
"def fixed(self):\n \"\"\"Returns a list of just the fixed source roots in the trie.\"\"\"\n for key, child in self._root.children.items():\n if key == '^':\n return list(child.subpatterns())\n return []",
"def getThirdPartyLibFiles(self, libs):\n\t\t\"\"\"\n\t\tRetrieves the list of library files for building against the Unreal-bundled versions of the specified third-party libraries\n\t\t\"\"\"\n\t\tplatformDefaults = True\n\t\tif libs[0] == '--nodefaults':\n\t\t\tplatformDefaults = False\n\t\t\tlibs = libs[1:]\n\t\t\n\t\tdetails = self.getThirdpartyLibs(libs, includePlatformDefaults=platformDefaults)\n\t\treturn details.getLibraryFiles(self.getEngineRoot(), delimiter='\\n')"
] |
[
0.8299906253814697,
0.7078806161880493,
0.7032569050788879,
0.698115885257721,
0.6977643966674805,
0.6867755055427551,
0.6652924418449402,
0.6635935306549072,
0.6580876708030701,
0.6555743217468262,
0.6539329290390015,
0.6534920930862427
] |
return a single fixer found in the lib2to3 library
|
def get_single_fixer(fixname):
'''return a single fixer found in the lib2to3 library'''
fixer_dirname = fixer_dir.__path__[0]
for name in sorted(os.listdir(fixer_dirname)):
if (name.startswith("fix_") and name.endswith(".py")
and fixname == name[4:-3]):
return "lib2to3.fixes." + name[:-3]
|
[
"def get_lib2to3_fixers():\n '''returns a list of all fixers found in the lib2to3 library'''\n fixers = []\n fixer_dirname = fixer_dir.__path__[0]\n for name in sorted(os.listdir(fixer_dirname)):\n if name.startswith(\"fix_\") and name.endswith(\".py\"):\n fixers.append(\"lib2to3.fixes.\" + name[:-3])\n return fixers",
"def refactor_with_2to3(source_text, fixer_names, filename=''):\n \"\"\"Use lib2to3 to refactor the source.\n\n Return the refactored source code.\n\n \"\"\"\n from lib2to3.refactor import RefactoringTool\n fixers = ['lib2to3.fixes.fix_' + name for name in fixer_names]\n tool = RefactoringTool(fixer_names=fixers, explicit=fixers)\n\n from lib2to3.pgen2 import tokenize as lib2to3_tokenize\n try:\n # The name parameter is necessary particularly for the \"import\" fixer.\n return unicode(tool.refactor_string(source_text, name=filename))\n except lib2to3_tokenize.TokenError:\n return source_text",
"def fix_2to3(source,\n aggressive=True, select=None, ignore=None, filename='',\n where='global', verbose=False):\n \"\"\"Fix various deprecated code (via lib2to3).\"\"\"\n if not aggressive:\n return source\n\n select = select or []\n ignore = ignore or []\n\n return refactor(source,\n code_to_2to3(select=select,\n ignore=ignore,\n where=where,\n verbose=verbose),\n filename=filename)",
"def fix_2to3(source,\n aggressive=True, select=None, ignore=None, filename=''):\n \"\"\"Fix various deprecated code (via lib2to3).\"\"\"\n if not aggressive:\n return source\n\n select = select or []\n ignore = ignore or []\n\n return refactor(source,\n code_to_2to3(select=select,\n ignore=ignore),\n filename=filename)",
"def refactor(source, fixer_names, ignore=None, filename=''):\n \"\"\"Return refactored code using lib2to3.\n\n Skip if ignore string is produced in the refactored code.\n\n \"\"\"\n check_lib2to3()\n from lib2to3 import pgen2\n try:\n new_text = refactor_with_2to3(source,\n fixer_names=fixer_names,\n filename=filename)\n except (pgen2.parse.ParseError,\n SyntaxError,\n UnicodeDecodeError,\n UnicodeEncodeError):\n return source\n\n if ignore:\n if ignore in new_text and ignore not in source:\n return source\n\n return new_text",
"def command_py2to3(args):\n \"\"\"\n Apply '2to3' tool (Python2 to Python3 conversion tool) to Python sources.\n \"\"\"\n from lib2to3.main import main\n sys.exit(main(\"lib2to3.fixes\", args=args.sources))",
"def fix(self, item):\n \"\"\"\n Make sure all strings are unicode for python 2/3 compatability\n \"\"\"\n if not self.is_python_2:\n return item\n if isinstance(item, str):\n return item.decode(\"utf-8\")\n if isinstance(item, unicode): # noqa <-- python3 has no unicode\n return item\n if isinstance(item, Mapping):\n return dict(map(self.fix, item.items()))\n elif isinstance(item, Iterable):\n return type(item)(map(self.fix, item))\n\n return item",
"function getFixerFunction(styleType, previousItemToken, commaToken, currentItemToken) {\n const text =\n sourceCode.text.slice(previousItemToken.range[1], commaToken.range[0]) +\n sourceCode.text.slice(commaToken.range[1], currentItemToken.range[0]);\n const range = [previousItemToken.range[1], currentItemToken.range[0]];\n\n return function(fixer) {\n return fixer.replaceTextRange(range, getReplacedText(styleType, text));\n };\n }",
"def fixer(base, target):\n \"\"\"Parse data from fixer.io.\"\"\"\n api_url = 'http://api.fixer.io/latest'\n resp = requests.get(\n api_url,\n params={\n 'base': base,\n 'symbols': target,\n },\n timeout=1,\n )\n data = resp.json()\n return decimal.Decimal(data['rates'][target])",
"def main(fixer_pkg, args=None):\n \"\"\"Main program.\n\n Args:\n fixer_pkg: the name of a package where the fixers are located.\n args: optional; a list of command line arguments. If omitted,\n sys.argv[1:] is used.\n\n Returns a suggested exit status (0, 1, 2).\n \"\"\"\n # Set up option parser\n parser = optparse.OptionParser(usage=\"2to3 [options] file|dir ...\")\n parser.add_option(\"-d\", \"--doctests_only\", action=\"store_true\",\n help=\"Fix up doctests only\")\n parser.add_option(\"-f\", \"--fix\", action=\"append\", default=[],\n help=\"Each FIX specifies a transformation; default: all\")\n parser.add_option(\"-j\", \"--processes\", action=\"store\", default=1,\n type=\"int\", help=\"Run 2to3 concurrently\")\n parser.add_option(\"-x\", \"--nofix\", action=\"append\", default=[],\n help=\"Prevent a transformation from being run\")\n parser.add_option(\"-l\", \"--list-fixes\", action=\"store_true\",\n help=\"List available transformations\")\n parser.add_option(\"-p\", \"--print-function\", action=\"store_true\",\n help=\"Modify the grammar so that print() is a function\")\n parser.add_option(\"-v\", \"--verbose\", action=\"store_true\",\n help=\"More verbose logging\")\n parser.add_option(\"--no-diffs\", action=\"store_true\",\n help=\"Don't show diffs of the refactoring\")\n parser.add_option(\"-w\", \"--write\", action=\"store_true\",\n help=\"Write back modified files\")\n parser.add_option(\"-n\", \"--nobackups\", action=\"store_true\", default=False,\n help=\"Don't write backups for modified files\")\n parser.add_option(\"-o\", \"--output-dir\", action=\"store\", type=\"str\",\n default=\"\", help=\"Put output files in this directory \"\n \"instead of overwriting the input files. Requires -n.\")\n parser.add_option(\"-W\", \"--write-unchanged-files\", action=\"store_true\",\n help=\"Also write files even if no changes were required\"\n \" (useful with --output-dir); implies -w.\")\n parser.add_option(\"--add-suffix\", action=\"store\", type=\"str\", default=\"\",\n help=\"Append this string to all output filenames.\"\n \" Requires -n if non-empty. 
\"\n \"ex: --add-suffix='3' will generate .py3 files.\")\n\n # Parse command line arguments\n refactor_stdin = False\n flags = {}\n options, args = parser.parse_args(args)\n if options.write_unchanged_files:\n flags[\"write_unchanged_files\"] = True\n if not options.write:\n warn(\"--write-unchanged-files/-W implies -w.\")\n options.write = True\n # If we allowed these, the original files would be renamed to backup names\n # but not replaced.\n if options.output_dir and not options.nobackups:\n parser.error(\"Can't use --output-dir/-o without -n.\")\n if options.add_suffix and not options.nobackups:\n parser.error(\"Can't use --add-suffix without -n.\")\n\n if not options.write and options.no_diffs:\n warn(\"not writing files and not printing diffs; that's not very useful\")\n if not options.write and options.nobackups:\n parser.error(\"Can't use -n without -w\")\n if options.list_fixes:\n print \"Available transformations for the -f/--fix option:\"\n for fixname in refactor.get_all_fix_names(fixer_pkg):\n print fixname\n if not args:\n return 0\n if not args:\n print >> sys.stderr, \"At least one file or directory argument required.\"\n print >> sys.stderr, \"Use --help to show usage.\"\n return 2\n if \"-\" in args:\n refactor_stdin = True\n if options.write:\n print >> sys.stderr, \"Can't write to stdin.\"\n return 2\n if options.print_function:\n flags[\"print_function\"] = True\n\n # Set up logging handler\n level = logging.DEBUG if options.verbose else logging.INFO\n logging.basicConfig(format='%(name)s: %(message)s', level=level)\n logger = logging.getLogger('lib2to3.main')\n\n # Initialize the refactoring tool\n avail_fixes = set(refactor.get_fixers_from_package(fixer_pkg))\n unwanted_fixes = set(fixer_pkg + \".fix_\" + fix for fix in options.nofix)\n explicit = set()\n if options.fix:\n all_present = False\n for fix in options.fix:\n if fix == \"all\":\n all_present = True\n else:\n explicit.add(fixer_pkg + \".fix_\" + fix)\n requested = avail_fixes.union(explicit) if all_present else explicit\n else:\n requested = avail_fixes.union(explicit)\n fixer_names = requested.difference(unwanted_fixes)\n input_base_dir = os.path.commonprefix(args)\n if (input_base_dir and not input_base_dir.endswith(os.sep)\n and not os.path.isdir(input_base_dir)):\n # One or more similar names were passed, their directory is the base.\n # os.path.commonprefix() is ignorant of path elements, this corrects\n # for that weird API.\n input_base_dir = os.path.dirname(input_base_dir)\n if options.output_dir:\n input_base_dir = input_base_dir.rstrip(os.sep)\n logger.info('Output in %r will mirror the input directory %r layout.',\n options.output_dir, input_base_dir)\n rt = StdoutRefactoringTool(\n sorted(fixer_names), flags, sorted(explicit),\n options.nobackups, not options.no_diffs,\n input_base_dir=input_base_dir,\n output_dir=options.output_dir,\n append_suffix=options.add_suffix)\n\n # Refactor all files and directories passed as arguments\n if not rt.errors:\n if refactor_stdin:\n rt.refactor_stdin()\n else:\n try:\n rt.refactor(args, options.write, options.doctests_only,\n options.processes)\n except refactor.MultiprocessingUnsupported:\n assert options.processes > 1\n print >> sys.stderr, \"Sorry, -j isn't \" \\\n \"supported on this platform.\"\n return 1\n rt.summarize()\n\n # Return error status (0 if rt.errors is zero)\n return int(bool(rt.errors))",
"private Fix longFix(ExpressionTree expr, VisitorState state) {\n BinaryTree binExpr = null;\n while (expr instanceof BinaryTree) {\n binExpr = (BinaryTree) expr;\n expr = binExpr.getLeftOperand();\n }\n if (!(expr instanceof LiteralTree) || expr.getKind() != Kind.INT_LITERAL) {\n return null;\n }\n Type intType = state.getSymtab().intType;\n if (!isSameType(getType(binExpr), intType, state)) {\n return null;\n }\n SuggestedFix.Builder fix = SuggestedFix.builder().postfixWith(expr, \"L\");\n Tree parent = state.getPath().getParentPath().getLeaf();\n if (parent instanceof VariableTree && isSameType(getType(parent), intType, state)) {\n fix.replace(((VariableTree) parent).getType(), \"long\");\n }\n return fix.build();\n }",
"function makeFixer(options) {\n if (typeof options === 'undefined') {\n return true;\n }\n\n if (typeof options === 'boolean') {\n return options;\n }\n\n const rulesToFix = options.rules;\n const fixWarnings = options.warnings;\n\n function ruleFixer(eslintMessage) {\n if (!rulesToFix) return true;\n\n if (rulesToFix.indexOf(eslintMessage.ruleId) !== -1) {\n return true;\n }\n return false;\n }\n\n function warningFixer(eslintMessage) {\n if (fixWarnings === false) {\n return eslintMessage.severity === 2;\n }\n\n return true;\n }\n\n return function (eslintMessage) {\n return ruleFixer(eslintMessage) && warningFixer(eslintMessage);\n };\n}"
] |
[
0.8598690032958984,
0.7472712397575378,
0.7428495287895203,
0.7360685467720032,
0.7156214118003845,
0.6804410219192505,
0.6788302063941956,
0.6769159436225891,
0.6761971116065979,
0.6723169088363647,
0.671269953250885,
0.6692692637443542
] |
Returns field's single value prepared for saving into a database.
|
def to_db(self, value):
""" Returns field's single value prepared for saving into a database. """
# ensure value is valid
self.validate(value)
assert isinstance(value, list)
value = list(value)
for i, v in enumerate(value):
value[i] = self.value_to_db(v)
# return result
assert isinstance(value, list)
return value
|
[
"def get_prep_value(self, value):\n \"\"\"Returns field's value prepared for saving into a database.\"\"\"\n\n if isinstance(value, LocalizedValue):\n prep_value = LocalizedValue()\n for k, v in value.__dict__.items():\n if v is None:\n prep_value.set(k, '')\n else:\n # Need to convert File objects provided via a form to\n # unicode for database insertion\n prep_value.set(k, six.text_type(v))\n return super().get_prep_value(prep_value)\n return super().get_prep_value(value)",
"def get_prep_value(self, value):\n \"Returns field's value prepared for saving into a database.\"\n value = self.get_clean_value(value)\n if self.multiple:\n if value:\n value = \",\".join(value)\n else:\n value = \"\"\n return super(CharField, self).get_prep_value(value)",
"def value(self):\n \"\"\"Property to be used when saving a custom field into\n :class:`couchbasekit.document.Document` instance.\n\n :returns: The value to be saved for the field within\n :class:`couchbasekit.document.Document` instances.\n :rtype: mixed\n \"\"\"\n if self._value is None:\n raise ValueError(\"%s's 'value' is not set.\" % type(self).__name__)\n return self._value",
"public T getSingleValue() {\n List<T> values = getSelectedValue();\n if (!values.isEmpty()) {\n return values.get(0);\n }\n return null;\n }",
"def get_db_prep_value(self, value, connection=None, prepared=False):\n \"\"\"Returns field's value prepared for interacting with the database\n backend.\n\n Used by the default implementations of ``get_db_prep_save``and\n `get_db_prep_lookup```\n \"\"\"\n if not value:\n return\n if prepared:\n return value\n else:\n assert(isinstance(value, list) or isinstance(value, tuple))\n return self.separator.join([unicode(s) for s in value])",
"def pre_save(self, model_instance, add):\n \"\"\"Returns field's value just before saving.\"\"\"\n value = super().pre_save(model_instance, add)\n if isinstance(value, LocalizedValue):\n for file in value.__dict__.values():\n if file and not file._committed:\n file.save(file.name, file, save=False)\n return value",
"def pre_save(self, *args, **kwargs):\n \"Returns field's value just before saving.\"\n value = super(CharField, self).pre_save(*args, **kwargs)\n return self.get_prep_value(value)",
"def get_db_prep_save(self, value, connection=None):\n \"\"\"\n Returns field's value prepared for saving into a database.\n \"\"\"\n ## convert to settings.TIME_ZONE\n if value is not None:\n if value.tzinfo is None:\n value = default_tz.localize(value)\n else:\n value = value.astimezone(default_tz)\n return super(LocalizedDateTimeField, self).get_db_prep_save(value, connection=connection)",
"def _get_prepped_model_field(model_obj, field):\n \"\"\"\n Gets the value of a field of a model obj that is prepared for the db.\n \"\"\"\n\n # Get the field\n field = model_obj._meta.get_field(field)\n\n # Get the value\n value = field.get_db_prep_save(getattr(model_obj, field.attname), connection)\n\n # Return the value\n return value",
"def get_pk(self, field_val):\n \"\"\"convenience method for running is_pk(_id).get_one() since this is so common\"\"\"\n field_name = self.schema.pk.name\n return self.is_field(field_name, field_val).get_one()",
"def value_to_db(self, value):\n \"\"\" Returns field's single value prepared for saving into a database. \"\"\"\n assert isinstance(value, datetime.datetime)\n\n try:\n value = value - datetime.datetime(1970, 1, 1)\n except OverflowError:\n raise tldap.exceptions.ValidationError(\"is too big a date\")\n\n value = value.seconds + value.days * 24 * 3600\n value = str(value).encode(\"utf_8\")\n\n return value",
"def value(self):\n \"\"\"convenience method to just get one value or tuple of values for the query\"\"\"\n field_vals = None\n field_names = self.fields_select.names()\n fcount = len(field_names)\n if fcount:\n d = self._query('get_one')\n if d:\n field_vals = [d.get(fn, None) for fn in field_names]\n if fcount == 1:\n field_vals = field_vals[0]\n\n else:\n raise ValueError(\"no select fields were set, so cannot return value\")\n\n return field_vals"
] |
[
0.7930915951728821,
0.7762113213539124,
0.7404974699020386,
0.7379361987113953,
0.7321757078170776,
0.7313757538795471,
0.7301207184791565,
0.7283756136894226,
0.7269089221954346,
0.7263267040252686,
0.7263159155845642,
0.7262715101242065
] |
Converts the input value into the expected Python data type, raising
django.core.exceptions.ValidationError if the data can't be converted.
Returns the converted value. Subclasses should override this.
|
def to_python(self, value):
"""
Converts the input value into the expected Python data type, raising
django.core.exceptions.ValidationError if the data can't be converted.
Returns the converted value. Subclasses should override this.
"""
assert isinstance(value, list)
# convert every value in list
value = list(value)
for i, v in enumerate(value):
value[i] = self.value_to_python(v)
# return result
return value
|
[
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n\n try:\n value = int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")\n\n try:\n value = datetime.date.fromtimestamp(value * 24 * 60 * 60)\n except OverflowError:\n raise tldap.exceptions.ValidationError(\"is too big a date\")\n\n return value",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be bytes\")\n if value is None:\n return value\n try:\n return int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n value = value.decode(\"utf_8\")\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"is invalid date time\")",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.date):\n raise tldap.exceptions.ValidationError(\"is invalid date\")\n # a datetime is also a date but they are not compatable\n if isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"should be a date, not a datetime\")",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, six.integer_types):\n raise tldap.exceptions.ValidationError(\"should be a integer\")\n\n try:\n return str(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if isinstance(value, dict):\n return value\n\n if self.blank and not value:\n return None\n\n if isinstance(value, string_types):\n try:\n return json.loads(value)\n except Exception as e:\n raise ValidationError(str(e))\n\n return value",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if self.blank and not value:\n return {}\n value = value or '{}'\n if isinstance(value, six.binary_type):\n value = six.text_type(value, 'utf-8')\n if isinstance(value, six.string_types):\n try:\n # with django 1.6 i have '\"{}\"' as default value here\n if value[0] == value[-1] == '\"':\n value = value[1:-1]\n\n return json.loads(value)\n except Exception as err:\n raise ValidationError(str(err))\n else:\n return value",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n\n length = len(value) - 8\n if length % 4 != 0:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n length = length // 4\n\n array = struct.unpack('<bbbbbbbb' + 'I' * length, value)\n\n if array[1] != length:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n if array[2:7] != (0, 0, 0, 0, 0):\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n array = (\"S\", ) + array[0:1] + array[7:]\n return \"-\".join([str(i) for i in array])",
"def to_internal_value(self, data):\n \"\"\"\n Dicts of native values <- Dicts of primitive datatypes.\n \"\"\"\n\n if html.is_html_input(data):\n data = html.parse_html_dict(data)\n if not isinstance(data, dict):\n self.fail('not_a_dict', input_type=type(data).__name__)\n if not self.allow_empty and len(data.keys()) == 0:\n message = self.error_messages['empty']\n raise ValidationError({\n api_settings.NON_FIELD_ERRORS_KEY: [message]\n })\n return {\n six.text_type(key): self.child.run_validation(value)\n for key, value in data.items()\n }",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, str):\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n array = value.split(\"-\")\n length = len(array) - 3\n\n if length < 1:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n if array.pop(0) != \"S\":\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n try:\n [int(i) for i in array]\n except TypeError:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")",
"def to_python(self, value):\n \"\"\"\n \"Called during deserialization and during the clean() method used\n from forms.... [s]hould deal gracefully with... (*) an instance of\n the correct type; (*) a string; (*) None (if the field allows\n null=True).\"\n\n \"For ``to_python()``, if anything goes wrong during value conversion,\n you should raise a ``ValidationError`` exception.\"\n \"\"\"\n if value is None:\n return value\n if not isinstance(value, str):\n return value\n try:\n return json_decode(value)\n except Exception as err:\n raise ValidationError(repr(err))"
] |
[
0.8508092164993286,
0.8455279469490051,
0.8399019241333008,
0.8279650807380676,
0.8272891640663147,
0.8231707215309143,
0.814752995967865,
0.806265115737915,
0.8001563549041748,
0.7947636246681213,
0.7799067497253418,
0.7714439630508423
] |
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
|
def validate(self, value):
"""
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
"""
# check object type
if not isinstance(value, list):
raise tldap.exceptions.ValidationError(
"is not a list and max_instances is %s" %
self._max_instances)
# check maximum instances
if (self._max_instances is not None and
len(value) > self._max_instances):
raise tldap.exceptions.ValidationError(
"exceeds max_instances of %d" %
self._max_instances)
# check this required value is given
if self._required:
if len(value) == 0:
raise tldap.exceptions.ValidationError(
"is required")
# validate the value
for i, v in enumerate(value):
self.value_validate(v)
|
[
"def validate(self, value, model_instance):\n \"\"\"\n Validates value and throws ValidationError. Subclasses should override\n this to provide validation logic.\n \"\"\"\n return super(self.__class__, self).validate(value.value, model_instance)",
"def value_validate(self, value):\n \"\"\"\n Validates value and throws ValidationError. Subclasses should override\n this to provide validation logic.\n \"\"\"\n if not isinstance(value, six.string_types):\n raise tldap.exceptions.ValidationError(\"should be a string\")",
"def validate(self, value, model_instance):\n \"\"\"\n Validates value and throws ValidationError. Subclasses should override\n this to provide validation logic.\n \"\"\"\n # pylint: disable=newstyle\n super(TimeZoneField, self).validate(\n value=self.get_prep_value(value),\n model_instance=model_instance\n )\n\n # Insure the value is can be converted to a timezone\n self.to_python(value)",
"def validate(self, value):\n \"\"\"\n Returns a cleaned and validated value. Raises a ValidationError\n if there's a problem\n \"\"\"\n if value is None:\n if self.required:\n raise ValidationError('{} - None values are not allowed'.format(self.column_name or self.db_field))\n return value",
"def validate(self, value):\n \"\"\"Performs validation of the value.\n :param value: value to validate\n :raise ValidationError if the value is invalid\"\"\"\n\n # check choices\n if self.choices:\n if isinstance(self.choices[0], (list, tuple)):\n option_keys = [k for k, v in self.choices]\n if value not in option_keys:\n msg = ('Value {0} is not listed among valid choices {1}'.format(value, option_keys))\n self.raise_error(msg)\n elif value not in self.choices:\n msg = ('Value {0} is not listed among valid choices {1}'.format(value, self.choices))\n self.raise_error(msg)",
"def validate(self, value):\n \"\"\"Validate a value for this field. If the field is invalid, this\n will raise a ValueError. Runs ``pre_validate`` hook prior to\n validation, and returns value if validation passes.\"\"\"\n value = self.pre_validate(value)\n if not self._typecheck(value):\n raise ValueError('%r failed type check' % value)\n return value",
"def _validate(self, value):\n \"\"\"Perform validation on ``value``. Raise a :exc:`ValidationError` if validation\n does not succeed.\n \"\"\"\n errors = []\n kwargs = {}\n for validator in self.validators:\n try:\n r = validator(value)\n if not isinstance(validator, Validator) and r is False:\n self.fail('validator_failed')\n except ValidationError as err:\n kwargs.update(err.kwargs)\n if isinstance(err.messages, dict):\n errors.append(err.messages)\n else:\n errors.extend(err.messages)\n if errors:\n raise ValidationError(errors, **kwargs)",
"def clean(self, value):\n \"\"\"Take a dirty value and clean it.\"\"\"\n if (\n self.base_type is not None and\n value is not None and\n not isinstance(value, self.base_type)\n ):\n if isinstance(self.base_type, tuple):\n allowed_types = [typ.__name__ for typ in self.base_type]\n allowed_types_text = ' or '.join(allowed_types)\n else:\n allowed_types_text = self.base_type.__name__\n err_msg = 'Value must be of %s type.' % allowed_types_text\n raise ValidationError(err_msg)\n\n if not self.has_value(value):\n if self.default is not None:\n raise StopValidation(self.default)\n\n if self.required:\n raise ValidationError('This field is required.')\n else:\n raise StopValidation(self.blank_value)\n\n return value",
"def validate(self, value):\n \"\"\"\n Applies the validation criteria.\n Returns value, new value, or None if invalid.\n\n Overload this in derived classes.\n \"\"\"\n try:\n # trap blank fields here\n if not self.blank or value:\n float(value)\n return value\n except ValueError:\n return None",
"def validate(self, value):\n \"\"\"Validate value.\"\"\"\n if self.exclusive:\n if value >= self.maximum_value:\n tpl = \"'{val}' is bigger or equal than maximum ('{max}').\"\n raise ValidationError(\n tpl.format(val=value, max=self.maximum_value))\n else:\n if value > self.maximum_value:\n raise ValidationError(\n \"'{value}' is bigger than maximum ('{max}').\".format(\n value=value, max=self.maximum_value))",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.date):\n raise tldap.exceptions.ValidationError(\"is invalid date\")\n # a datetime is also a date but they are not compatable\n if isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"should be a date, not a datetime\")",
"def validate(self, value):\n \"\"\"Validate value.\"\"\"\n if self.exclusive:\n if value <= self.minimum_value:\n tpl = \"'{value}' is lower or equal than minimum ('{min}').\"\n raise ValidationError(\n tpl.format(value=value, min=self.minimum_value))\n else:\n if value < self.minimum_value:\n raise ValidationError(\n \"'{value}' is lower than minimum ('{min}').\".format(\n value=value, min=self.minimum_value))"
] |
[
0.8405061364173889,
0.8293669819831848,
0.7976804971694946,
0.7885225415229797,
0.7805082201957703,
0.7797256708145142,
0.779566764831543,
0.7763097882270813,
0.7717840671539307,
0.7703309655189514,
0.7695854902267456,
0.7688673138618469
] |
Convert the value's type and run validation. Validation errors from
to_python and validate are propagated. The correct value is returned if
no error is raised.
|
def clean(self, value):
"""
Convert the value's type and run validation. Validation errors from
to_python and validate are propagated. The correct value is returned if
no error is raised.
"""
value = self.to_python(value)
self.validate(value)
return value
|
[
"def clean(self, value, model_instance):\n \"\"\"\n Convert the value's type and run validation. Validation errors\n from to_python and validate are propagated. The correct value is\n returned if no error is raised.\n \"\"\"\n #: return constant's name instead of constant itself\n value = self.to_python(value).name\n self.validate(value, model_instance)\n self.run_validators(value)\n return value",
"def clean(self, value):\n \"\"\"Run validators and return the clean value.\"\"\"\n if value is None:\n value = self.default\n\n try:\n value = self.to_python(value)\n self.validate(value)\n except ValidationError as error:\n raise ValidationError(\"invalid value for {}: {}\".format(\n self.name,\n error.args[0]\n ))\n return value",
"def run_validation(self, data=empty):\n \"\"\"\n We override the default `run_validation`, because the validation\n performed by validators and the `.validate()` method should\n be coerced into an error dictionary with a 'non_fields_error' key.\n \"\"\"\n (is_empty_value, data) = self.validate_empty_values(data)\n if is_empty_value:\n return data\n\n value = self.to_internal_value(data)\n try:\n self.run_validators(value)\n value = self.validate(value)\n assert value is not None, '.validate() should return the validated data'\n except (ValidationError, DjangoValidationError) as exc:\n raise ValidationError(detail=get_validation_error_detail(exc))\n\n return value",
"def to_python(self, value):\n \"\"\"\n \"Called during deserialization and during the clean() method used\n from forms.... [s]hould deal gracefully with... (*) an instance of\n the correct type; (*) a string; (*) None (if the field allows\n null=True).\"\n\n \"For ``to_python()``, if anything goes wrong during value conversion,\n you should raise a ``ValidationError`` exception.\"\n \"\"\"\n if value is None:\n return value\n if not isinstance(value, str):\n return value\n try:\n return json_decode(value)\n except Exception as err:\n raise ValidationError(repr(err))",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, six.integer_types):\n raise tldap.exceptions.ValidationError(\"should be a integer\")\n\n try:\n return str(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def validate(self, value):\n \"\"\"Validate a value for this field. If the field is invalid, this\n will raise a ValueError. Runs ``pre_validate`` hook prior to\n validation, and returns value if validation passes.\"\"\"\n value = self.pre_validate(value)\n if not self._typecheck(value):\n raise ValueError('%r failed type check' % value)\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.date):\n raise tldap.exceptions.ValidationError(\"is invalid date\")\n # a datetime is also a date but they are not compatable\n if isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"should be a date, not a datetime\")",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"is invalid date time\")",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be bytes\")\n if value is None:\n return value\n try:\n return int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def validate_value(self, value):\n \"\"\"\n We call validation from the underlying form field\n \"\"\"\n field = self.instance.preference.setup_field()\n value = field.to_python(value)\n field.validate(value)\n field.run_validators(value)\n return value",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n\n try:\n value = int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")\n\n try:\n value = datetime.date.fromtimestamp(value * 24 * 60 * 60)\n except OverflowError:\n raise tldap.exceptions.ValidationError(\"is too big a date\")\n\n return value",
"def make_stone_friendly(self, data_type, val, validate):\n \"\"\"\n Convert a Python object to a type that will pass validation by its\n validator.\n Validation by ``alias_validators`` is performed even if ``validate`` is\n false.\n \"\"\"\n if isinstance(data_type, bv.Timestamp):\n try:\n ret = datetime.datetime.strptime(val, data_type.format)\n except (TypeError, ValueError) as e:\n raise bv.ValidationError(e.args[0])\n elif isinstance(data_type, bv.Bytes):\n if self.for_msgpack:\n if isinstance(val, six.text_type):\n ret = val.encode('utf-8')\n else:\n ret = val\n else:\n try:\n ret = base64.b64decode(val)\n except TypeError:\n raise bv.ValidationError('invalid base64-encoded bytes')\n elif isinstance(data_type, bv.Void):\n if self.strict and val is not None:\n raise bv.ValidationError(\"expected null, got value\")\n return None\n else:\n if validate:\n if self.caller_permissions.permissions:\n data_type.validate_with_permissions(val, self.caller_permissions)\n else:\n data_type.validate(val)\n ret = val\n if self.alias_validators is not None and data_type in self.alias_validators:\n self.alias_validators[data_type](ret)\n return ret"
] |
[
0.7912411093711853,
0.7856140732765198,
0.7601820230484009,
0.7515721321105957,
0.7506422996520996,
0.7505135536193848,
0.7475513815879822,
0.7470663785934448,
0.746669590473175,
0.7434817552566528,
0.737034261226654,
0.7361854314804077
] |
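Aside on the clean() record above: to_python, validate and clean typically compose in exactly that order. The sketch below is a minimal, hypothetical illustration of the pattern; StringField and its ValidationError are made-up names for this example and are not taken from any of the libraries quoted in this dataset.

class ValidationError(Exception):
    """Raised when a value fails validation."""


class StringField:
    """Illustrative field: clean() converts the type, then validates."""

    def __init__(self, required=True):
        self.required = required

    def to_python(self, value):
        # Coerce bytes to str; leave None and str untouched.
        if isinstance(value, bytes):
            return value.decode("utf_8")
        return value

    def validate(self, value):
        if value is None and self.required:
            raise ValidationError("this field is required")
        if value is not None and not isinstance(value, str):
            raise ValidationError("should be a string")

    def clean(self, value):
        # Same shape as the clean() shown above: convert, validate, return.
        value = self.to_python(value)
        self.validate(value)
        return value


# b"hello" is decoded and returned; None would raise ValidationError.
assert StringField().clean(b"hello") == "hello"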
Returns field's single value prepared for saving into a database.
|
def value_to_db(self, value):
""" Returns field's single value prepared for saving into a database. """
if isinstance(value, six.string_types):
value = value.encode("utf_8")
return value
|
[
"def get_prep_value(self, value):\n \"\"\"Returns field's value prepared for saving into a database.\"\"\"\n\n if isinstance(value, LocalizedValue):\n prep_value = LocalizedValue()\n for k, v in value.__dict__.items():\n if v is None:\n prep_value.set(k, '')\n else:\n # Need to convert File objects provided via a form to\n # unicode for database insertion\n prep_value.set(k, six.text_type(v))\n return super().get_prep_value(prep_value)\n return super().get_prep_value(value)",
"def get_prep_value(self, value):\n \"Returns field's value prepared for saving into a database.\"\n value = self.get_clean_value(value)\n if self.multiple:\n if value:\n value = \",\".join(value)\n else:\n value = \"\"\n return super(CharField, self).get_prep_value(value)",
"def value(self):\n \"\"\"Property to be used when saving a custom field into\n :class:`couchbasekit.document.Document` instance.\n\n :returns: The value to be saved for the field within\n :class:`couchbasekit.document.Document` instances.\n :rtype: mixed\n \"\"\"\n if self._value is None:\n raise ValueError(\"%s's 'value' is not set.\" % type(self).__name__)\n return self._value",
"public T getSingleValue() {\n List<T> values = getSelectedValue();\n if (!values.isEmpty()) {\n return values.get(0);\n }\n return null;\n }",
"def get_db_prep_value(self, value, connection=None, prepared=False):\n \"\"\"Returns field's value prepared for interacting with the database\n backend.\n\n Used by the default implementations of ``get_db_prep_save``and\n `get_db_prep_lookup```\n \"\"\"\n if not value:\n return\n if prepared:\n return value\n else:\n assert(isinstance(value, list) or isinstance(value, tuple))\n return self.separator.join([unicode(s) for s in value])",
"def pre_save(self, model_instance, add):\n \"\"\"Returns field's value just before saving.\"\"\"\n value = super().pre_save(model_instance, add)\n if isinstance(value, LocalizedValue):\n for file in value.__dict__.values():\n if file and not file._committed:\n file.save(file.name, file, save=False)\n return value",
"def pre_save(self, *args, **kwargs):\n \"Returns field's value just before saving.\"\n value = super(CharField, self).pre_save(*args, **kwargs)\n return self.get_prep_value(value)",
"def get_db_prep_save(self, value, connection=None):\n \"\"\"\n Returns field's value prepared for saving into a database.\n \"\"\"\n ## convert to settings.TIME_ZONE\n if value is not None:\n if value.tzinfo is None:\n value = default_tz.localize(value)\n else:\n value = value.astimezone(default_tz)\n return super(LocalizedDateTimeField, self).get_db_prep_save(value, connection=connection)",
"def _get_prepped_model_field(model_obj, field):\n \"\"\"\n Gets the value of a field of a model obj that is prepared for the db.\n \"\"\"\n\n # Get the field\n field = model_obj._meta.get_field(field)\n\n # Get the value\n value = field.get_db_prep_save(getattr(model_obj, field.attname), connection)\n\n # Return the value\n return value",
"def get_pk(self, field_val):\n \"\"\"convenience method for running is_pk(_id).get_one() since this is so common\"\"\"\n field_name = self.schema.pk.name\n return self.is_field(field_name, field_val).get_one()",
"def value_to_db(self, value):\n \"\"\" Returns field's single value prepared for saving into a database. \"\"\"\n assert isinstance(value, datetime.datetime)\n\n try:\n value = value - datetime.datetime(1970, 1, 1)\n except OverflowError:\n raise tldap.exceptions.ValidationError(\"is too big a date\")\n\n value = value.seconds + value.days * 24 * 3600\n value = str(value).encode(\"utf_8\")\n\n return value",
"def value(self):\n \"\"\"convenience method to just get one value or tuple of values for the query\"\"\"\n field_vals = None\n field_names = self.fields_select.names()\n fcount = len(field_names)\n if fcount:\n d = self._query('get_one')\n if d:\n field_vals = [d.get(fn, None) for fn in field_names]\n if fcount == 1:\n field_vals = field_vals[0]\n\n else:\n raise ValueError(\"no select fields were set, so cannot return value\")\n\n return field_vals"
] |
[
0.7930915951728821,
0.7762113213539124,
0.7404974699020386,
0.7379361987113953,
0.7321757078170776,
0.7313757538795471,
0.7301207184791565,
0.7283756136894226,
0.7269089221954346,
0.7263267040252686,
0.7263159155845642,
0.7262715101242065
] |
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
|
def value_to_python(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, bytes):
raise tldap.exceptions.ValidationError("should be a bytes")
value = value.decode("utf_8")
return value
|
[
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n\n try:\n value = int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")\n\n try:\n value = datetime.date.fromtimestamp(value * 24 * 60 * 60)\n except OverflowError:\n raise tldap.exceptions.ValidationError(\"is too big a date\")\n\n return value",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be bytes\")\n if value is None:\n return value\n try:\n return int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.date):\n raise tldap.exceptions.ValidationError(\"is invalid date\")\n # a datetime is also a date but they are not compatable\n if isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"should be a date, not a datetime\")",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"is invalid date time\")",
"def to_python(self, value):\n \"\"\"\n Converts the input value into the expected Python data type, raising\n django.core.exceptions.ValidationError if the data can't be converted.\n Returns the converted value. Subclasses should override this.\n \"\"\"\n assert isinstance(value, list)\n\n # convert every value in list\n value = list(value)\n for i, v in enumerate(value):\n value[i] = self.value_to_python(v)\n\n # return result\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, six.integer_types):\n raise tldap.exceptions.ValidationError(\"should be a integer\")\n\n try:\n return str(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n\n length = len(value) - 8\n if length % 4 != 0:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n length = length // 4\n\n array = struct.unpack('<bbbbbbbb' + 'I' * length, value)\n\n if array[1] != length:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n if array[2:7] != (0, 0, 0, 0, 0):\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n array = (\"S\", ) + array[0:1] + array[7:]\n return \"-\".join([str(i) for i in array])",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if isinstance(value, dict):\n return value\n\n if self.blank and not value:\n return None\n\n if isinstance(value, string_types):\n try:\n return json.loads(value)\n except Exception as e:\n raise ValidationError(str(e))\n\n return value",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if self.blank and not value:\n return {}\n value = value or '{}'\n if isinstance(value, six.binary_type):\n value = six.text_type(value, 'utf-8')\n if isinstance(value, six.string_types):\n try:\n # with django 1.6 i have '\"{}\"' as default value here\n if value[0] == value[-1] == '\"':\n value = value[1:-1]\n\n return json.loads(value)\n except Exception as err:\n raise ValidationError(str(err))\n else:\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, str):\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n array = value.split(\"-\")\n length = len(array) - 3\n\n if length < 1:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n if array.pop(0) != \"S\":\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n try:\n [int(i) for i in array]\n except TypeError:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")",
"def to_internal_value(self, data):\n \"\"\"\n Dicts of native values <- Dicts of primitive datatypes.\n \"\"\"\n\n if html.is_html_input(data):\n data = html.parse_html_dict(data)\n if not isinstance(data, dict):\n self.fail('not_a_dict', input_type=type(data).__name__)\n if not self.allow_empty and len(data.keys()) == 0:\n message = self.error_messages['empty']\n raise ValidationError({\n api_settings.NON_FIELD_ERRORS_KEY: [message]\n })\n return {\n six.text_type(key): self.child.run_validation(value)\n for key, value in data.items()\n }",
"def to_python(self, value):\n \"\"\"\n \"Called during deserialization and during the clean() method used\n from forms.... [s]hould deal gracefully with... (*) an instance of\n the correct type; (*) a string; (*) None (if the field allows\n null=True).\"\n\n \"For ``to_python()``, if anything goes wrong during value conversion,\n you should raise a ``ValidationError`` exception.\"\n \"\"\"\n if value is None:\n return value\n if not isinstance(value, str):\n return value\n try:\n return json_decode(value)\n except Exception as err:\n raise ValidationError(repr(err))"
] |
[
0.8518143892288208,
0.8449154496192932,
0.8376913070678711,
0.8368150591850281,
0.8314429521560669,
0.8277285695075989,
0.8108909726142883,
0.8054237365722656,
0.7992805242538452,
0.7976009845733643,
0.782760739326477,
0.76715487241745
] |
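The two preceding records are the encode and decode halves of the same string field. A hedged sketch of that round trip, with free functions standing in for the field methods (the function names and the plain ValueError are assumptions for illustration only):

def value_to_db(value):
    # str -> bytes, matching the value_to_db record above
    # (the original checks six.string_types; plain str suffices on Python 3).
    if isinstance(value, str):
        value = value.encode("utf_8")
    return value


def value_to_python(value):
    # bytes -> str, matching the value_to_python record above.
    if not isinstance(value, bytes):
        raise ValueError("should be a bytes")
    return value.decode("utf_8")


# Round trip: the decoded value equals the original string.
assert value_to_python(value_to_db("cn=admin")) == "cn=admin"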
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
|
def value_validate(self, value):
"""
Validates value and throws ValidationError. Subclasses should override
this to provide validation logic.
"""
if not isinstance(value, six.string_types):
raise tldap.exceptions.ValidationError("should be a string")
|
[
"def validate(self, value, model_instance):\n \"\"\"\n Validates value and throws ValidationError. Subclasses should override\n this to provide validation logic.\n \"\"\"\n return super(self.__class__, self).validate(value.value, model_instance)",
"def validate(self, value):\n \"\"\"\n Validates value and throws ValidationError. Subclasses should override\n this to provide validation logic.\n \"\"\"\n # check object type\n if not isinstance(value, list):\n raise tldap.exceptions.ValidationError(\n \"is not a list and max_instances is %s\" %\n self._max_instances)\n # check maximum instances\n if (self._max_instances is not None and\n len(value) > self._max_instances):\n raise tldap.exceptions.ValidationError(\n \"exceeds max_instances of %d\" %\n self._max_instances)\n # check this required value is given\n if self._required:\n if len(value) == 0:\n raise tldap.exceptions.ValidationError(\n \"is required\")\n # validate the value\n for i, v in enumerate(value):\n self.value_validate(v)",
"def validate(self, value, model_instance):\n \"\"\"\n Validates value and throws ValidationError. Subclasses should override\n this to provide validation logic.\n \"\"\"\n # pylint: disable=newstyle\n super(TimeZoneField, self).validate(\n value=self.get_prep_value(value),\n model_instance=model_instance\n )\n\n # Insure the value is can be converted to a timezone\n self.to_python(value)",
"def validate(self, value):\n \"\"\"\n Returns a cleaned and validated value. Raises a ValidationError\n if there's a problem\n \"\"\"\n if value is None:\n if self.required:\n raise ValidationError('{} - None values are not allowed'.format(self.column_name or self.db_field))\n return value",
"def validate(self, value):\n \"\"\"Performs validation of the value.\n :param value: value to validate\n :raise ValidationError if the value is invalid\"\"\"\n\n # check choices\n if self.choices:\n if isinstance(self.choices[0], (list, tuple)):\n option_keys = [k for k, v in self.choices]\n if value not in option_keys:\n msg = ('Value {0} is not listed among valid choices {1}'.format(value, option_keys))\n self.raise_error(msg)\n elif value not in self.choices:\n msg = ('Value {0} is not listed among valid choices {1}'.format(value, self.choices))\n self.raise_error(msg)",
"def validate(self, value):\n \"\"\"Validate a value for this field. If the field is invalid, this\n will raise a ValueError. Runs ``pre_validate`` hook prior to\n validation, and returns value if validation passes.\"\"\"\n value = self.pre_validate(value)\n if not self._typecheck(value):\n raise ValueError('%r failed type check' % value)\n return value",
"def _validate(self, value):\n \"\"\"Perform validation on ``value``. Raise a :exc:`ValidationError` if validation\n does not succeed.\n \"\"\"\n errors = []\n kwargs = {}\n for validator in self.validators:\n try:\n r = validator(value)\n if not isinstance(validator, Validator) and r is False:\n self.fail('validator_failed')\n except ValidationError as err:\n kwargs.update(err.kwargs)\n if isinstance(err.messages, dict):\n errors.append(err.messages)\n else:\n errors.extend(err.messages)\n if errors:\n raise ValidationError(errors, **kwargs)",
"def clean(self, value):\n \"\"\"Take a dirty value and clean it.\"\"\"\n if (\n self.base_type is not None and\n value is not None and\n not isinstance(value, self.base_type)\n ):\n if isinstance(self.base_type, tuple):\n allowed_types = [typ.__name__ for typ in self.base_type]\n allowed_types_text = ' or '.join(allowed_types)\n else:\n allowed_types_text = self.base_type.__name__\n err_msg = 'Value must be of %s type.' % allowed_types_text\n raise ValidationError(err_msg)\n\n if not self.has_value(value):\n if self.default is not None:\n raise StopValidation(self.default)\n\n if self.required:\n raise ValidationError('This field is required.')\n else:\n raise StopValidation(self.blank_value)\n\n return value",
"def validate(self, value):\n \"\"\"\n Applies the validation criteria.\n Returns value, new value, or None if invalid.\n\n Overload this in derived classes.\n \"\"\"\n try:\n # trap blank fields here\n if not self.blank or value:\n float(value)\n return value\n except ValueError:\n return None",
"def validate(self, value):\n \"\"\"Validate value.\"\"\"\n if self.exclusive:\n if value >= self.maximum_value:\n tpl = \"'{val}' is bigger or equal than maximum ('{max}').\"\n raise ValidationError(\n tpl.format(val=value, max=self.maximum_value))\n else:\n if value > self.maximum_value:\n raise ValidationError(\n \"'{value}' is bigger than maximum ('{max}').\".format(\n value=value, max=self.maximum_value))",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.date):\n raise tldap.exceptions.ValidationError(\"is invalid date\")\n # a datetime is also a date but they are not compatable\n if isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"should be a date, not a datetime\")",
"def validate(self, value):\n \"\"\"Validate value.\"\"\"\n if self.exclusive:\n if value <= self.minimum_value:\n tpl = \"'{value}' is lower or equal than minimum ('{min}').\"\n raise ValidationError(\n tpl.format(value=value, min=self.minimum_value))\n else:\n if value < self.minimum_value:\n raise ValidationError(\n \"'{value}' is lower than minimum ('{min}').\".format(\n value=value, min=self.minimum_value))"
] |
[
0.8405061364173889,
0.8216529488563538,
0.7976804971694946,
0.7885225415229797,
0.7805082201957703,
0.7797256708145142,
0.779566764831543,
0.7763097882270813,
0.7717840671539307,
0.7703309655189514,
0.7695854902267456,
0.7688673138618469
] |
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
|
def value_to_python(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if value is None:
return value
if not isinstance(value, bytes):
raise tldap.exceptions.ValidationError("should be bytes")
try:
return int(value)
except (TypeError, ValueError):
raise tldap.exceptions.ValidationError("is invalid integer")
|
[
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n\n try:\n value = int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")\n\n try:\n value = datetime.date.fromtimestamp(value * 24 * 60 * 60)\n except OverflowError:\n raise tldap.exceptions.ValidationError(\"is too big a date\")\n\n return value",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n value = value.decode(\"utf_8\")\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.date):\n raise tldap.exceptions.ValidationError(\"is invalid date\")\n # a datetime is also a date but they are not compatable\n if isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"should be a date, not a datetime\")",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"is invalid date time\")",
"def to_python(self, value):\n \"\"\"\n Converts the input value into the expected Python data type, raising\n django.core.exceptions.ValidationError if the data can't be converted.\n Returns the converted value. Subclasses should override this.\n \"\"\"\n assert isinstance(value, list)\n\n # convert every value in list\n value = list(value)\n for i, v in enumerate(value):\n value[i] = self.value_to_python(v)\n\n # return result\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, six.integer_types):\n raise tldap.exceptions.ValidationError(\"should be a integer\")\n\n try:\n return str(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n\n length = len(value) - 8\n if length % 4 != 0:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n length = length // 4\n\n array = struct.unpack('<bbbbbbbb' + 'I' * length, value)\n\n if array[1] != length:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n if array[2:7] != (0, 0, 0, 0, 0):\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n array = (\"S\", ) + array[0:1] + array[7:]\n return \"-\".join([str(i) for i in array])",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if isinstance(value, dict):\n return value\n\n if self.blank and not value:\n return None\n\n if isinstance(value, string_types):\n try:\n return json.loads(value)\n except Exception as e:\n raise ValidationError(str(e))\n\n return value",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if self.blank and not value:\n return {}\n value = value or '{}'\n if isinstance(value, six.binary_type):\n value = six.text_type(value, 'utf-8')\n if isinstance(value, six.string_types):\n try:\n # with django 1.6 i have '\"{}\"' as default value here\n if value[0] == value[-1] == '\"':\n value = value[1:-1]\n\n return json.loads(value)\n except Exception as err:\n raise ValidationError(str(err))\n else:\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, str):\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n array = value.split(\"-\")\n length = len(array) - 3\n\n if length < 1:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n if array.pop(0) != \"S\":\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n try:\n [int(i) for i in array]\n except TypeError:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")",
"def to_internal_value(self, data):\n \"\"\"\n Dicts of native values <- Dicts of primitive datatypes.\n \"\"\"\n\n if html.is_html_input(data):\n data = html.parse_html_dict(data)\n if not isinstance(data, dict):\n self.fail('not_a_dict', input_type=type(data).__name__)\n if not self.allow_empty and len(data.keys()) == 0:\n message = self.error_messages['empty']\n raise ValidationError({\n api_settings.NON_FIELD_ERRORS_KEY: [message]\n })\n return {\n six.text_type(key): self.child.run_validation(value)\n for key, value in data.items()\n }",
"def to_python(self, value):\n \"\"\"\n \"Called during deserialization and during the clean() method used\n from forms.... [s]hould deal gracefully with... (*) an instance of\n the correct type; (*) a string; (*) None (if the field allows\n null=True).\"\n\n \"For ``to_python()``, if anything goes wrong during value conversion,\n you should raise a ``ValidationError`` exception.\"\n \"\"\"\n if value is None:\n return value\n if not isinstance(value, str):\n return value\n try:\n return json_decode(value)\n except Exception as err:\n raise ValidationError(repr(err))"
] |
[
0.8518143892288208,
0.838757336139679,
0.8376913070678711,
0.8368150591850281,
0.8314429521560669,
0.8277285695075989,
0.8108909726142883,
0.8054237365722656,
0.7992805242538452,
0.7976009845733643,
0.782760739326477,
0.76715487241745
] |
Returns field's single value prepared for saving into a database.
|
def value_to_db(self, value):
""" Returns field's single value prepared for saving into a database. """
assert isinstance(value, six.integer_types)
return str(value).encode("utf_8")
|
[
"def get_prep_value(self, value):\n \"\"\"Returns field's value prepared for saving into a database.\"\"\"\n\n if isinstance(value, LocalizedValue):\n prep_value = LocalizedValue()\n for k, v in value.__dict__.items():\n if v is None:\n prep_value.set(k, '')\n else:\n # Need to convert File objects provided via a form to\n # unicode for database insertion\n prep_value.set(k, six.text_type(v))\n return super().get_prep_value(prep_value)\n return super().get_prep_value(value)",
"def get_prep_value(self, value):\n \"Returns field's value prepared for saving into a database.\"\n value = self.get_clean_value(value)\n if self.multiple:\n if value:\n value = \",\".join(value)\n else:\n value = \"\"\n return super(CharField, self).get_prep_value(value)",
"def value(self):\n \"\"\"Property to be used when saving a custom field into\n :class:`couchbasekit.document.Document` instance.\n\n :returns: The value to be saved for the field within\n :class:`couchbasekit.document.Document` instances.\n :rtype: mixed\n \"\"\"\n if self._value is None:\n raise ValueError(\"%s's 'value' is not set.\" % type(self).__name__)\n return self._value",
"public T getSingleValue() {\n List<T> values = getSelectedValue();\n if (!values.isEmpty()) {\n return values.get(0);\n }\n return null;\n }",
"def get_db_prep_value(self, value, connection=None, prepared=False):\n \"\"\"Returns field's value prepared for interacting with the database\n backend.\n\n Used by the default implementations of ``get_db_prep_save``and\n `get_db_prep_lookup```\n \"\"\"\n if not value:\n return\n if prepared:\n return value\n else:\n assert(isinstance(value, list) or isinstance(value, tuple))\n return self.separator.join([unicode(s) for s in value])",
"def pre_save(self, model_instance, add):\n \"\"\"Returns field's value just before saving.\"\"\"\n value = super().pre_save(model_instance, add)\n if isinstance(value, LocalizedValue):\n for file in value.__dict__.values():\n if file and not file._committed:\n file.save(file.name, file, save=False)\n return value",
"def pre_save(self, *args, **kwargs):\n \"Returns field's value just before saving.\"\n value = super(CharField, self).pre_save(*args, **kwargs)\n return self.get_prep_value(value)",
"def get_db_prep_save(self, value, connection=None):\n \"\"\"\n Returns field's value prepared for saving into a database.\n \"\"\"\n ## convert to settings.TIME_ZONE\n if value is not None:\n if value.tzinfo is None:\n value = default_tz.localize(value)\n else:\n value = value.astimezone(default_tz)\n return super(LocalizedDateTimeField, self).get_db_prep_save(value, connection=connection)",
"def _get_prepped_model_field(model_obj, field):\n \"\"\"\n Gets the value of a field of a model obj that is prepared for the db.\n \"\"\"\n\n # Get the field\n field = model_obj._meta.get_field(field)\n\n # Get the value\n value = field.get_db_prep_save(getattr(model_obj, field.attname), connection)\n\n # Return the value\n return value",
"def get_pk(self, field_val):\n \"\"\"convenience method for running is_pk(_id).get_one() since this is so common\"\"\"\n field_name = self.schema.pk.name\n return self.is_field(field_name, field_val).get_one()",
"def value_to_db(self, value):\n \"\"\" Returns field's single value prepared for saving into a database. \"\"\"\n assert isinstance(value, datetime.datetime)\n\n try:\n value = value - datetime.datetime(1970, 1, 1)\n except OverflowError:\n raise tldap.exceptions.ValidationError(\"is too big a date\")\n\n value = value.seconds + value.days * 24 * 3600\n value = str(value).encode(\"utf_8\")\n\n return value",
"def value(self):\n \"\"\"convenience method to just get one value or tuple of values for the query\"\"\"\n field_vals = None\n field_names = self.fields_select.names()\n fcount = len(field_names)\n if fcount:\n d = self._query('get_one')\n if d:\n field_vals = [d.get(fn, None) for fn in field_names]\n if fcount == 1:\n field_vals = field_vals[0]\n\n else:\n raise ValueError(\"no select fields were set, so cannot return value\")\n\n return field_vals"
] |
[
0.7930915951728821,
0.7762113213539124,
0.7404974699020386,
0.7379361987113953,
0.7321757078170776,
0.7313757538795471,
0.7301207184791565,
0.7283756136894226,
0.7269089221954346,
0.7263267040252686,
0.7263159155845642,
0.7262715101242065
] |
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
|
def value_validate(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, six.integer_types):
raise tldap.exceptions.ValidationError("should be a integer")
try:
return str(value)
except (TypeError, ValueError):
raise tldap.exceptions.ValidationError("is invalid integer")
|
[
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n\n try:\n value = int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")\n\n try:\n value = datetime.date.fromtimestamp(value * 24 * 60 * 60)\n except OverflowError:\n raise tldap.exceptions.ValidationError(\"is too big a date\")\n\n return value",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be bytes\")\n if value is None:\n return value\n try:\n return int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n value = value.decode(\"utf_8\")\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.date):\n raise tldap.exceptions.ValidationError(\"is invalid date\")\n # a datetime is also a date but they are not compatable\n if isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"should be a date, not a datetime\")",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"is invalid date time\")",
"def to_python(self, value):\n \"\"\"\n Converts the input value into the expected Python data type, raising\n django.core.exceptions.ValidationError if the data can't be converted.\n Returns the converted value. Subclasses should override this.\n \"\"\"\n assert isinstance(value, list)\n\n # convert every value in list\n value = list(value)\n for i, v in enumerate(value):\n value[i] = self.value_to_python(v)\n\n # return result\n return value",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n\n length = len(value) - 8\n if length % 4 != 0:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n length = length // 4\n\n array = struct.unpack('<bbbbbbbb' + 'I' * length, value)\n\n if array[1] != length:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n if array[2:7] != (0, 0, 0, 0, 0):\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n array = (\"S\", ) + array[0:1] + array[7:]\n return \"-\".join([str(i) for i in array])",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if isinstance(value, dict):\n return value\n\n if self.blank and not value:\n return None\n\n if isinstance(value, string_types):\n try:\n return json.loads(value)\n except Exception as e:\n raise ValidationError(str(e))\n\n return value",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if self.blank and not value:\n return {}\n value = value or '{}'\n if isinstance(value, six.binary_type):\n value = six.text_type(value, 'utf-8')\n if isinstance(value, six.string_types):\n try:\n # with django 1.6 i have '\"{}\"' as default value here\n if value[0] == value[-1] == '\"':\n value = value[1:-1]\n\n return json.loads(value)\n except Exception as err:\n raise ValidationError(str(err))\n else:\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, str):\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n array = value.split(\"-\")\n length = len(array) - 3\n\n if length < 1:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n if array.pop(0) != \"S\":\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n try:\n [int(i) for i in array]\n except TypeError:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")",
"def to_internal_value(self, data):\n \"\"\"\n Dicts of native values <- Dicts of primitive datatypes.\n \"\"\"\n\n if html.is_html_input(data):\n data = html.parse_html_dict(data)\n if not isinstance(data, dict):\n self.fail('not_a_dict', input_type=type(data).__name__)\n if not self.allow_empty and len(data.keys()) == 0:\n message = self.error_messages['empty']\n raise ValidationError({\n api_settings.NON_FIELD_ERRORS_KEY: [message]\n })\n return {\n six.text_type(key): self.child.run_validation(value)\n for key, value in data.items()\n }",
"def to_python(self, value):\n \"\"\"\n \"Called during deserialization and during the clean() method used\n from forms.... [s]hould deal gracefully with... (*) an instance of\n the correct type; (*) a string; (*) None (if the field allows\n null=True).\"\n\n \"For ``to_python()``, if anything goes wrong during value conversion,\n you should raise a ``ValidationError`` exception.\"\n \"\"\"\n if value is None:\n return value\n if not isinstance(value, str):\n return value\n try:\n return json_decode(value)\n except Exception as err:\n raise ValidationError(repr(err))"
] |
[
0.8518143892288208,
0.8449154496192932,
0.838757336139679,
0.8376913070678711,
0.8368150591850281,
0.8314429521560669,
0.8108909726142883,
0.8054237365722656,
0.7992805242538452,
0.7976009845733643,
0.782760739326477,
0.76715487241745
] |
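The two integer-field records above pair a type check with database preparation. A short illustrative sketch of how they compose; validate_int and int_to_db are hypothetical names, and a plain ValueError stands in for tldap.exceptions.ValidationError:

def validate_int(value):
    # Mirrors the value_validate record above: reject anything that is not an int.
    if not isinstance(value, int):
        raise ValueError("should be an integer")


def int_to_db(value):
    # Mirrors the value_to_db record above: store the integer as ASCII bytes.
    validate_int(value)
    return str(value).encode("utf_8")


assert int_to_db(1001) == b"1001"
assert int(b"1001") == 1001  # int() accepts ASCII digit bytes, which the matching decoder relies on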
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
|
def value_to_python(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, bytes):
raise tldap.exceptions.ValidationError("should be a bytes")
try:
value = int(value)
except (TypeError, ValueError):
raise tldap.exceptions.ValidationError("is invalid integer")
try:
value = datetime.date.fromtimestamp(value * 24 * 60 * 60)
except OverflowError:
raise tldap.exceptions.ValidationError("is too big a date")
return value
|
[
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be bytes\")\n if value is None:\n return value\n try:\n return int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n value = value.decode(\"utf_8\")\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.date):\n raise tldap.exceptions.ValidationError(\"is invalid date\")\n # a datetime is also a date but they are not compatable\n if isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"should be a date, not a datetime\")",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"is invalid date time\")",
"def to_python(self, value):\n \"\"\"\n Converts the input value into the expected Python data type, raising\n django.core.exceptions.ValidationError if the data can't be converted.\n Returns the converted value. Subclasses should override this.\n \"\"\"\n assert isinstance(value, list)\n\n # convert every value in list\n value = list(value)\n for i, v in enumerate(value):\n value[i] = self.value_to_python(v)\n\n # return result\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, six.integer_types):\n raise tldap.exceptions.ValidationError(\"should be a integer\")\n\n try:\n return str(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n\n length = len(value) - 8\n if length % 4 != 0:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n length = length // 4\n\n array = struct.unpack('<bbbbbbbb' + 'I' * length, value)\n\n if array[1] != length:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n if array[2:7] != (0, 0, 0, 0, 0):\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n array = (\"S\", ) + array[0:1] + array[7:]\n return \"-\".join([str(i) for i in array])",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if isinstance(value, dict):\n return value\n\n if self.blank and not value:\n return None\n\n if isinstance(value, string_types):\n try:\n return json.loads(value)\n except Exception as e:\n raise ValidationError(str(e))\n\n return value",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if self.blank and not value:\n return {}\n value = value or '{}'\n if isinstance(value, six.binary_type):\n value = six.text_type(value, 'utf-8')\n if isinstance(value, six.string_types):\n try:\n # with django 1.6 i have '\"{}\"' as default value here\n if value[0] == value[-1] == '\"':\n value = value[1:-1]\n\n return json.loads(value)\n except Exception as err:\n raise ValidationError(str(err))\n else:\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, str):\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n array = value.split(\"-\")\n length = len(array) - 3\n\n if length < 1:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n if array.pop(0) != \"S\":\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n try:\n [int(i) for i in array]\n except TypeError:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")",
"def to_internal_value(self, data):\n \"\"\"\n Dicts of native values <- Dicts of primitive datatypes.\n \"\"\"\n\n if html.is_html_input(data):\n data = html.parse_html_dict(data)\n if not isinstance(data, dict):\n self.fail('not_a_dict', input_type=type(data).__name__)\n if not self.allow_empty and len(data.keys()) == 0:\n message = self.error_messages['empty']\n raise ValidationError({\n api_settings.NON_FIELD_ERRORS_KEY: [message]\n })\n return {\n six.text_type(key): self.child.run_validation(value)\n for key, value in data.items()\n }",
"def to_python(self, value):\n \"\"\"\n \"Called during deserialization and during the clean() method used\n from forms.... [s]hould deal gracefully with... (*) an instance of\n the correct type; (*) a string; (*) None (if the field allows\n null=True).\"\n\n \"For ``to_python()``, if anything goes wrong during value conversion,\n you should raise a ``ValidationError`` exception.\"\n \"\"\"\n if value is None:\n return value\n if not isinstance(value, str):\n return value\n try:\n return json_decode(value)\n except Exception as err:\n raise ValidationError(repr(err))"
] |
[
0.8449154496192932,
0.838757336139679,
0.8376913070678711,
0.8368150591850281,
0.8314429521560669,
0.8277285695075989,
0.8108909726142883,
0.8054237365722656,
0.7992805242538452,
0.7976009845733643,
0.782760739326477,
0.76715487241745
] |
Returns field's single value prepared for saving into a database.
|
def value_to_db(self, value):
""" Returns field's single value prepared for saving into a database. """
assert isinstance(value, datetime.date)
assert not isinstance(value, datetime.datetime)
try:
value = value - datetime.date(year=1970, month=1, day=1)
except OverflowError:
raise tldap.exceptions.ValidationError("is too big a date")
return str(value.days).encode("utf_8")
|
[
"def get_prep_value(self, value):\n \"\"\"Returns field's value prepared for saving into a database.\"\"\"\n\n if isinstance(value, LocalizedValue):\n prep_value = LocalizedValue()\n for k, v in value.__dict__.items():\n if v is None:\n prep_value.set(k, '')\n else:\n # Need to convert File objects provided via a form to\n # unicode for database insertion\n prep_value.set(k, six.text_type(v))\n return super().get_prep_value(prep_value)\n return super().get_prep_value(value)",
"def get_prep_value(self, value):\n \"Returns field's value prepared for saving into a database.\"\n value = self.get_clean_value(value)\n if self.multiple:\n if value:\n value = \",\".join(value)\n else:\n value = \"\"\n return super(CharField, self).get_prep_value(value)",
"def value(self):\n \"\"\"Property to be used when saving a custom field into\n :class:`couchbasekit.document.Document` instance.\n\n :returns: The value to be saved for the field within\n :class:`couchbasekit.document.Document` instances.\n :rtype: mixed\n \"\"\"\n if self._value is None:\n raise ValueError(\"%s's 'value' is not set.\" % type(self).__name__)\n return self._value",
"public T getSingleValue() {\n List<T> values = getSelectedValue();\n if (!values.isEmpty()) {\n return values.get(0);\n }\n return null;\n }",
"def get_db_prep_value(self, value, connection=None, prepared=False):\n \"\"\"Returns field's value prepared for interacting with the database\n backend.\n\n Used by the default implementations of ``get_db_prep_save``and\n `get_db_prep_lookup```\n \"\"\"\n if not value:\n return\n if prepared:\n return value\n else:\n assert(isinstance(value, list) or isinstance(value, tuple))\n return self.separator.join([unicode(s) for s in value])",
"def pre_save(self, model_instance, add):\n \"\"\"Returns field's value just before saving.\"\"\"\n value = super().pre_save(model_instance, add)\n if isinstance(value, LocalizedValue):\n for file in value.__dict__.values():\n if file and not file._committed:\n file.save(file.name, file, save=False)\n return value",
"def pre_save(self, *args, **kwargs):\n \"Returns field's value just before saving.\"\n value = super(CharField, self).pre_save(*args, **kwargs)\n return self.get_prep_value(value)",
"def get_db_prep_save(self, value, connection=None):\n \"\"\"\n Returns field's value prepared for saving into a database.\n \"\"\"\n ## convert to settings.TIME_ZONE\n if value is not None:\n if value.tzinfo is None:\n value = default_tz.localize(value)\n else:\n value = value.astimezone(default_tz)\n return super(LocalizedDateTimeField, self).get_db_prep_save(value, connection=connection)",
"def _get_prepped_model_field(model_obj, field):\n \"\"\"\n Gets the value of a field of a model obj that is prepared for the db.\n \"\"\"\n\n # Get the field\n field = model_obj._meta.get_field(field)\n\n # Get the value\n value = field.get_db_prep_save(getattr(model_obj, field.attname), connection)\n\n # Return the value\n return value",
"def get_pk(self, field_val):\n \"\"\"convenience method for running is_pk(_id).get_one() since this is so common\"\"\"\n field_name = self.schema.pk.name\n return self.is_field(field_name, field_val).get_one()",
"def value_to_db(self, value):\n \"\"\" Returns field's single value prepared for saving into a database. \"\"\"\n assert isinstance(value, datetime.datetime)\n\n try:\n value = value - datetime.datetime(1970, 1, 1)\n except OverflowError:\n raise tldap.exceptions.ValidationError(\"is too big a date\")\n\n value = value.seconds + value.days * 24 * 3600\n value = str(value).encode(\"utf_8\")\n\n return value",
"def value(self):\n \"\"\"convenience method to just get one value or tuple of values for the query\"\"\"\n field_vals = None\n field_names = self.fields_select.names()\n fcount = len(field_names)\n if fcount:\n d = self._query('get_one')\n if d:\n field_vals = [d.get(fn, None) for fn in field_names]\n if fcount == 1:\n field_vals = field_vals[0]\n\n else:\n raise ValueError(\"no select fields were set, so cannot return value\")\n\n return field_vals"
] |
[
0.7930915951728821,
0.7762113213539124,
0.7404974699020386,
0.7379361987113953,
0.7321757078170776,
0.7313757538795471,
0.7301207184791565,
0.7283756136894226,
0.7269089221954346,
0.7263267040252686,
0.7263159155845642,
0.7262715101242065
] |
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
|
def value_validate(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, datetime.date):
raise tldap.exceptions.ValidationError("is invalid date")
    # a datetime is also a date, but they are not compatible
if isinstance(value, datetime.datetime):
raise tldap.exceptions.ValidationError("should be a date, not a datetime")
|
[
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n\n try:\n value = int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")\n\n try:\n value = datetime.date.fromtimestamp(value * 24 * 60 * 60)\n except OverflowError:\n raise tldap.exceptions.ValidationError(\"is too big a date\")\n\n return value",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be bytes\")\n if value is None:\n return value\n try:\n return int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n value = value.decode(\"utf_8\")\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"is invalid date time\")",
"def to_python(self, value):\n \"\"\"\n Converts the input value into the expected Python data type, raising\n django.core.exceptions.ValidationError if the data can't be converted.\n Returns the converted value. Subclasses should override this.\n \"\"\"\n assert isinstance(value, list)\n\n # convert every value in list\n value = list(value)\n for i, v in enumerate(value):\n value[i] = self.value_to_python(v)\n\n # return result\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, six.integer_types):\n raise tldap.exceptions.ValidationError(\"should be a integer\")\n\n try:\n return str(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n\n length = len(value) - 8\n if length % 4 != 0:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n length = length // 4\n\n array = struct.unpack('<bbbbbbbb' + 'I' * length, value)\n\n if array[1] != length:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n if array[2:7] != (0, 0, 0, 0, 0):\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n array = (\"S\", ) + array[0:1] + array[7:]\n return \"-\".join([str(i) for i in array])",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if isinstance(value, dict):\n return value\n\n if self.blank and not value:\n return None\n\n if isinstance(value, string_types):\n try:\n return json.loads(value)\n except Exception as e:\n raise ValidationError(str(e))\n\n return value",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if self.blank and not value:\n return {}\n value = value or '{}'\n if isinstance(value, six.binary_type):\n value = six.text_type(value, 'utf-8')\n if isinstance(value, six.string_types):\n try:\n # with django 1.6 i have '\"{}\"' as default value here\n if value[0] == value[-1] == '\"':\n value = value[1:-1]\n\n return json.loads(value)\n except Exception as err:\n raise ValidationError(str(err))\n else:\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, str):\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n array = value.split(\"-\")\n length = len(array) - 3\n\n if length < 1:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n if array.pop(0) != \"S\":\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n try:\n [int(i) for i in array]\n except TypeError:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")",
"def to_internal_value(self, data):\n \"\"\"\n Dicts of native values <- Dicts of primitive datatypes.\n \"\"\"\n\n if html.is_html_input(data):\n data = html.parse_html_dict(data)\n if not isinstance(data, dict):\n self.fail('not_a_dict', input_type=type(data).__name__)\n if not self.allow_empty and len(data.keys()) == 0:\n message = self.error_messages['empty']\n raise ValidationError({\n api_settings.NON_FIELD_ERRORS_KEY: [message]\n })\n return {\n six.text_type(key): self.child.run_validation(value)\n for key, value in data.items()\n }",
"def to_python(self, value):\n \"\"\"\n \"Called during deserialization and during the clean() method used\n from forms.... [s]hould deal gracefully with... (*) an instance of\n the correct type; (*) a string; (*) None (if the field allows\n null=True).\"\n\n \"For ``to_python()``, if anything goes wrong during value conversion,\n you should raise a ``ValidationError`` exception.\"\n \"\"\"\n if value is None:\n return value\n if not isinstance(value, str):\n return value\n try:\n return json_decode(value)\n except Exception as err:\n raise ValidationError(repr(err))"
] |
[
0.8518143892288208,
0.8449154496192932,
0.838757336139679,
0.8368150591850281,
0.8314429521560669,
0.8277285695075989,
0.8108909726142883,
0.8054237365722656,
0.7992805242538452,
0.7976009845733643,
0.782760739326477,
0.76715487241745
] |
Returns field's single value prepared for saving into a database.
|
def value_to_db(self, value):
""" Returns field's single value prepared for saving into a database. """
assert isinstance(value, datetime.datetime)
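    # Stored as whole seconds since the Unix epoch (1970-01-01); fractional seconds are dropped.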
try:
value = value - datetime.datetime(1970, 1, 1)
except OverflowError:
raise tldap.exceptions.ValidationError("is too big a date")
value = value.seconds + value.days * 24 * 3600
value = str(value).encode("utf_8")
return value
|
[
"def get_prep_value(self, value):\n \"\"\"Returns field's value prepared for saving into a database.\"\"\"\n\n if isinstance(value, LocalizedValue):\n prep_value = LocalizedValue()\n for k, v in value.__dict__.items():\n if v is None:\n prep_value.set(k, '')\n else:\n # Need to convert File objects provided via a form to\n # unicode for database insertion\n prep_value.set(k, six.text_type(v))\n return super().get_prep_value(prep_value)\n return super().get_prep_value(value)",
"def get_prep_value(self, value):\n \"Returns field's value prepared for saving into a database.\"\n value = self.get_clean_value(value)\n if self.multiple:\n if value:\n value = \",\".join(value)\n else:\n value = \"\"\n return super(CharField, self).get_prep_value(value)",
"def value(self):\n \"\"\"Property to be used when saving a custom field into\n :class:`couchbasekit.document.Document` instance.\n\n :returns: The value to be saved for the field within\n :class:`couchbasekit.document.Document` instances.\n :rtype: mixed\n \"\"\"\n if self._value is None:\n raise ValueError(\"%s's 'value' is not set.\" % type(self).__name__)\n return self._value",
"public T getSingleValue() {\n List<T> values = getSelectedValue();\n if (!values.isEmpty()) {\n return values.get(0);\n }\n return null;\n }",
"def get_db_prep_value(self, value, connection=None, prepared=False):\n \"\"\"Returns field's value prepared for interacting with the database\n backend.\n\n Used by the default implementations of ``get_db_prep_save``and\n `get_db_prep_lookup```\n \"\"\"\n if not value:\n return\n if prepared:\n return value\n else:\n assert(isinstance(value, list) or isinstance(value, tuple))\n return self.separator.join([unicode(s) for s in value])",
"def pre_save(self, model_instance, add):\n \"\"\"Returns field's value just before saving.\"\"\"\n value = super().pre_save(model_instance, add)\n if isinstance(value, LocalizedValue):\n for file in value.__dict__.values():\n if file and not file._committed:\n file.save(file.name, file, save=False)\n return value",
"def pre_save(self, *args, **kwargs):\n \"Returns field's value just before saving.\"\n value = super(CharField, self).pre_save(*args, **kwargs)\n return self.get_prep_value(value)",
"def get_db_prep_save(self, value, connection=None):\n \"\"\"\n Returns field's value prepared for saving into a database.\n \"\"\"\n ## convert to settings.TIME_ZONE\n if value is not None:\n if value.tzinfo is None:\n value = default_tz.localize(value)\n else:\n value = value.astimezone(default_tz)\n return super(LocalizedDateTimeField, self).get_db_prep_save(value, connection=connection)",
"def _get_prepped_model_field(model_obj, field):\n \"\"\"\n Gets the value of a field of a model obj that is prepared for the db.\n \"\"\"\n\n # Get the field\n field = model_obj._meta.get_field(field)\n\n # Get the value\n value = field.get_db_prep_save(getattr(model_obj, field.attname), connection)\n\n # Return the value\n return value",
"def get_pk(self, field_val):\n \"\"\"convenience method for running is_pk(_id).get_one() since this is so common\"\"\"\n field_name = self.schema.pk.name\n return self.is_field(field_name, field_val).get_one()",
"def value(self):\n \"\"\"convenience method to just get one value or tuple of values for the query\"\"\"\n field_vals = None\n field_names = self.fields_select.names()\n fcount = len(field_names)\n if fcount:\n d = self._query('get_one')\n if d:\n field_vals = [d.get(fn, None) for fn in field_names]\n if fcount == 1:\n field_vals = field_vals[0]\n\n else:\n raise ValueError(\"no select fields were set, so cannot return value\")\n\n return field_vals",
"def previous(self, field):\n \"\"\"Returns currently saved value of given field\"\"\"\n\n # handle deferred fields that have not yet been loaded from the database\n if self.instance.pk and field in self.deferred_fields and field not in self.saved_data:\n\n # if the field has not been assigned locally, simply fetch and un-defer the value\n if field not in self.instance.__dict__:\n self.get_field_value(field)\n\n # if the field has been assigned locally, store the local value, fetch the database value,\n # store database value to saved_data, and restore the local value\n else:\n current_value = self.get_field_value(field)\n self.instance.refresh_from_db(fields=[field])\n self.saved_data[field] = deepcopy(self.get_field_value(field))\n setattr(self.instance, self.field_map[field], current_value)\n\n return self.saved_data.get(field)"
] |
[
0.7930915951728821,
0.7762113213539124,
0.7404974699020386,
0.7379361987113953,
0.7321757078170776,
0.7313757538795471,
0.7301207184791565,
0.7283756136894226,
0.7269089221954346,
0.7263267040252686,
0.7262715101242065,
0.7243165969848633
] |
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
|
def value_validate(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, datetime.datetime):
raise tldap.exceptions.ValidationError("is invalid date time")
|
[
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n\n try:\n value = int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")\n\n try:\n value = datetime.date.fromtimestamp(value * 24 * 60 * 60)\n except OverflowError:\n raise tldap.exceptions.ValidationError(\"is too big a date\")\n\n return value",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be bytes\")\n if value is None:\n return value\n try:\n return int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n value = value.decode(\"utf_8\")\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.date):\n raise tldap.exceptions.ValidationError(\"is invalid date\")\n # a datetime is also a date but they are not compatable\n if isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"should be a date, not a datetime\")",
"def to_python(self, value):\n \"\"\"\n Converts the input value into the expected Python data type, raising\n django.core.exceptions.ValidationError if the data can't be converted.\n Returns the converted value. Subclasses should override this.\n \"\"\"\n assert isinstance(value, list)\n\n # convert every value in list\n value = list(value)\n for i, v in enumerate(value):\n value[i] = self.value_to_python(v)\n\n # return result\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, six.integer_types):\n raise tldap.exceptions.ValidationError(\"should be a integer\")\n\n try:\n return str(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n\n length = len(value) - 8\n if length % 4 != 0:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n length = length // 4\n\n array = struct.unpack('<bbbbbbbb' + 'I' * length, value)\n\n if array[1] != length:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n if array[2:7] != (0, 0, 0, 0, 0):\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n array = (\"S\", ) + array[0:1] + array[7:]\n return \"-\".join([str(i) for i in array])",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if isinstance(value, dict):\n return value\n\n if self.blank and not value:\n return None\n\n if isinstance(value, string_types):\n try:\n return json.loads(value)\n except Exception as e:\n raise ValidationError(str(e))\n\n return value",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if self.blank and not value:\n return {}\n value = value or '{}'\n if isinstance(value, six.binary_type):\n value = six.text_type(value, 'utf-8')\n if isinstance(value, six.string_types):\n try:\n # with django 1.6 i have '\"{}\"' as default value here\n if value[0] == value[-1] == '\"':\n value = value[1:-1]\n\n return json.loads(value)\n except Exception as err:\n raise ValidationError(str(err))\n else:\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, str):\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n array = value.split(\"-\")\n length = len(array) - 3\n\n if length < 1:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n if array.pop(0) != \"S\":\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n try:\n [int(i) for i in array]\n except TypeError:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")",
"def to_internal_value(self, data):\n \"\"\"\n Dicts of native values <- Dicts of primitive datatypes.\n \"\"\"\n\n if html.is_html_input(data):\n data = html.parse_html_dict(data)\n if not isinstance(data, dict):\n self.fail('not_a_dict', input_type=type(data).__name__)\n if not self.allow_empty and len(data.keys()) == 0:\n message = self.error_messages['empty']\n raise ValidationError({\n api_settings.NON_FIELD_ERRORS_KEY: [message]\n })\n return {\n six.text_type(key): self.child.run_validation(value)\n for key, value in data.items()\n }",
"def to_python(self, value):\n \"\"\"\n \"Called during deserialization and during the clean() method used\n from forms.... [s]hould deal gracefully with... (*) an instance of\n the correct type; (*) a string; (*) None (if the field allows\n null=True).\"\n\n \"For ``to_python()``, if anything goes wrong during value conversion,\n you should raise a ``ValidationError`` exception.\"\n \"\"\"\n if value is None:\n return value\n if not isinstance(value, str):\n return value\n try:\n return json_decode(value)\n except Exception as err:\n raise ValidationError(repr(err))"
] |
[
0.8518143892288208,
0.8449154496192932,
0.838757336139679,
0.8376913070678711,
0.8314429521560669,
0.8277285695075989,
0.8108909726142883,
0.8054237365722656,
0.7992805242538452,
0.7976009845733643,
0.782760739326477,
0.76715487241745
] |
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
|
def value_to_python(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, bytes):
raise tldap.exceptions.ValidationError("should be a bytes")
length = len(value) - 8
if length % 4 != 0:
raise tldap.exceptions.ValidationError("Invalid sid")
length = length // 4
array = struct.unpack('<bbbbbbbb' + 'I' * length, value)
if array[1] != length:
raise tldap.exceptions.ValidationError("Invalid sid")
if array[2:7] != (0, 0, 0, 0, 0):
raise tldap.exceptions.ValidationError("Invalid sid")
array = ("S", ) + array[0:1] + array[7:]
return "-".join([str(i) for i in array])
|
[
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n\n try:\n value = int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")\n\n try:\n value = datetime.date.fromtimestamp(value * 24 * 60 * 60)\n except OverflowError:\n raise tldap.exceptions.ValidationError(\"is too big a date\")\n\n return value",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be bytes\")\n if value is None:\n return value\n try:\n return int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n value = value.decode(\"utf_8\")\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.date):\n raise tldap.exceptions.ValidationError(\"is invalid date\")\n # a datetime is also a date but they are not compatable\n if isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"should be a date, not a datetime\")",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"is invalid date time\")",
"def to_python(self, value):\n \"\"\"\n Converts the input value into the expected Python data type, raising\n django.core.exceptions.ValidationError if the data can't be converted.\n Returns the converted value. Subclasses should override this.\n \"\"\"\n assert isinstance(value, list)\n\n # convert every value in list\n value = list(value)\n for i, v in enumerate(value):\n value[i] = self.value_to_python(v)\n\n # return result\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, six.integer_types):\n raise tldap.exceptions.ValidationError(\"should be a integer\")\n\n try:\n return str(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if isinstance(value, dict):\n return value\n\n if self.blank and not value:\n return None\n\n if isinstance(value, string_types):\n try:\n return json.loads(value)\n except Exception as e:\n raise ValidationError(str(e))\n\n return value",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if self.blank and not value:\n return {}\n value = value or '{}'\n if isinstance(value, six.binary_type):\n value = six.text_type(value, 'utf-8')\n if isinstance(value, six.string_types):\n try:\n # with django 1.6 i have '\"{}\"' as default value here\n if value[0] == value[-1] == '\"':\n value = value[1:-1]\n\n return json.loads(value)\n except Exception as err:\n raise ValidationError(str(err))\n else:\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, str):\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n array = value.split(\"-\")\n length = len(array) - 3\n\n if length < 1:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n if array.pop(0) != \"S\":\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n try:\n [int(i) for i in array]\n except TypeError:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")",
"def to_internal_value(self, data):\n \"\"\"\n Dicts of native values <- Dicts of primitive datatypes.\n \"\"\"\n\n if html.is_html_input(data):\n data = html.parse_html_dict(data)\n if not isinstance(data, dict):\n self.fail('not_a_dict', input_type=type(data).__name__)\n if not self.allow_empty and len(data.keys()) == 0:\n message = self.error_messages['empty']\n raise ValidationError({\n api_settings.NON_FIELD_ERRORS_KEY: [message]\n })\n return {\n six.text_type(key): self.child.run_validation(value)\n for key, value in data.items()\n }",
"def to_python(self, value):\n \"\"\"\n \"Called during deserialization and during the clean() method used\n from forms.... [s]hould deal gracefully with... (*) an instance of\n the correct type; (*) a string; (*) None (if the field allows\n null=True).\"\n\n \"For ``to_python()``, if anything goes wrong during value conversion,\n you should raise a ``ValidationError`` exception.\"\n \"\"\"\n if value is None:\n return value\n if not isinstance(value, str):\n return value\n try:\n return json_decode(value)\n except Exception as err:\n raise ValidationError(repr(err))"
] |
[
0.8518143892288208,
0.8449154496192932,
0.838757336139679,
0.8376913070678711,
0.8368150591850281,
0.8314429521560669,
0.8277285695075989,
0.8054237365722656,
0.7992805242538452,
0.7976009845733643,
0.782760739326477,
0.76715487241745
] |
Returns field's single value prepared for saving into a database.
|
def value_to_db(self, value):
""" Returns field's single value prepared for saving into a database. """
assert isinstance(value, str)
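    # Parse the "S-<revision>-<authority>-<sub-authorities...>" string form.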
array = value.split("-")
length = len(array) - 3
assert length >= 0
assert array[0] == 'S'
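    # Rebuild the 8 header bytes (revision, sub-authority count, 6-byte identifier
    # authority with its high five bytes zero), then pack the 32-bit sub-authorities.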
array = array[1:2] + [length, 0, 0, 0, 0, 0] + array[2:]
array = [int(i) for i in array]
return struct.pack('<bbbbbbbb' + 'I' * length, *array)
|
[
"def get_prep_value(self, value):\n \"\"\"Returns field's value prepared for saving into a database.\"\"\"\n\n if isinstance(value, LocalizedValue):\n prep_value = LocalizedValue()\n for k, v in value.__dict__.items():\n if v is None:\n prep_value.set(k, '')\n else:\n # Need to convert File objects provided via a form to\n # unicode for database insertion\n prep_value.set(k, six.text_type(v))\n return super().get_prep_value(prep_value)\n return super().get_prep_value(value)",
"def get_prep_value(self, value):\n \"Returns field's value prepared for saving into a database.\"\n value = self.get_clean_value(value)\n if self.multiple:\n if value:\n value = \",\".join(value)\n else:\n value = \"\"\n return super(CharField, self).get_prep_value(value)",
"def value(self):\n \"\"\"Property to be used when saving a custom field into\n :class:`couchbasekit.document.Document` instance.\n\n :returns: The value to be saved for the field within\n :class:`couchbasekit.document.Document` instances.\n :rtype: mixed\n \"\"\"\n if self._value is None:\n raise ValueError(\"%s's 'value' is not set.\" % type(self).__name__)\n return self._value",
"public T getSingleValue() {\n List<T> values = getSelectedValue();\n if (!values.isEmpty()) {\n return values.get(0);\n }\n return null;\n }",
"def get_db_prep_value(self, value, connection=None, prepared=False):\n \"\"\"Returns field's value prepared for interacting with the database\n backend.\n\n Used by the default implementations of ``get_db_prep_save``and\n `get_db_prep_lookup```\n \"\"\"\n if not value:\n return\n if prepared:\n return value\n else:\n assert(isinstance(value, list) or isinstance(value, tuple))\n return self.separator.join([unicode(s) for s in value])",
"def pre_save(self, model_instance, add):\n \"\"\"Returns field's value just before saving.\"\"\"\n value = super().pre_save(model_instance, add)\n if isinstance(value, LocalizedValue):\n for file in value.__dict__.values():\n if file and not file._committed:\n file.save(file.name, file, save=False)\n return value",
"def pre_save(self, *args, **kwargs):\n \"Returns field's value just before saving.\"\n value = super(CharField, self).pre_save(*args, **kwargs)\n return self.get_prep_value(value)",
"def get_db_prep_save(self, value, connection=None):\n \"\"\"\n Returns field's value prepared for saving into a database.\n \"\"\"\n ## convert to settings.TIME_ZONE\n if value is not None:\n if value.tzinfo is None:\n value = default_tz.localize(value)\n else:\n value = value.astimezone(default_tz)\n return super(LocalizedDateTimeField, self).get_db_prep_save(value, connection=connection)",
"def _get_prepped_model_field(model_obj, field):\n \"\"\"\n Gets the value of a field of a model obj that is prepared for the db.\n \"\"\"\n\n # Get the field\n field = model_obj._meta.get_field(field)\n\n # Get the value\n value = field.get_db_prep_save(getattr(model_obj, field.attname), connection)\n\n # Return the value\n return value",
"def get_pk(self, field_val):\n \"\"\"convenience method for running is_pk(_id).get_one() since this is so common\"\"\"\n field_name = self.schema.pk.name\n return self.is_field(field_name, field_val).get_one()",
"def value_to_db(self, value):\n \"\"\" Returns field's single value prepared for saving into a database. \"\"\"\n assert isinstance(value, datetime.datetime)\n\n try:\n value = value - datetime.datetime(1970, 1, 1)\n except OverflowError:\n raise tldap.exceptions.ValidationError(\"is too big a date\")\n\n value = value.seconds + value.days * 24 * 3600\n value = str(value).encode(\"utf_8\")\n\n return value",
"def value(self):\n \"\"\"convenience method to just get one value or tuple of values for the query\"\"\"\n field_vals = None\n field_names = self.fields_select.names()\n fcount = len(field_names)\n if fcount:\n d = self._query('get_one')\n if d:\n field_vals = [d.get(fn, None) for fn in field_names]\n if fcount == 1:\n field_vals = field_vals[0]\n\n else:\n raise ValueError(\"no select fields were set, so cannot return value\")\n\n return field_vals"
] |
[
0.7930915951728821,
0.7762113213539124,
0.7404974699020386,
0.7379361987113953,
0.7321757078170776,
0.7313757538795471,
0.7301207184791565,
0.7283756136894226,
0.7269089221954346,
0.7263267040252686,
0.7263159155845642,
0.7262715101242065
] |
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
|
def value_validate(self, value):
"""
Converts the input single value into the expected Python data type,
raising django.core.exceptions.ValidationError if the data can't be
converted. Returns the converted value. Subclasses should override
this.
"""
if not isinstance(value, str):
raise tldap.exceptions.ValidationError("Invalid sid")
array = value.split("-")
length = len(array) - 3
if length < 1:
raise tldap.exceptions.ValidationError("Invalid sid")
if array.pop(0) != "S":
raise tldap.exceptions.ValidationError("Invalid sid")
try:
[int(i) for i in array]
except TypeError:
raise tldap.exceptions.ValidationError("Invalid sid")
|
[
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n\n try:\n value = int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")\n\n try:\n value = datetime.date.fromtimestamp(value * 24 * 60 * 60)\n except OverflowError:\n raise tldap.exceptions.ValidationError(\"is too big a date\")\n\n return value",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be bytes\")\n if value is None:\n return value\n try:\n return int(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n value = value.decode(\"utf_8\")\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.date):\n raise tldap.exceptions.ValidationError(\"is invalid date\")\n # a datetime is also a date but they are not compatable\n if isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"should be a date, not a datetime\")",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, datetime.datetime):\n raise tldap.exceptions.ValidationError(\"is invalid date time\")",
"def to_python(self, value):\n \"\"\"\n Converts the input value into the expected Python data type, raising\n django.core.exceptions.ValidationError if the data can't be converted.\n Returns the converted value. Subclasses should override this.\n \"\"\"\n assert isinstance(value, list)\n\n # convert every value in list\n value = list(value)\n for i, v in enumerate(value):\n value[i] = self.value_to_python(v)\n\n # return result\n return value",
"def value_validate(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, six.integer_types):\n raise tldap.exceptions.ValidationError(\"should be a integer\")\n\n try:\n return str(value)\n except (TypeError, ValueError):\n raise tldap.exceptions.ValidationError(\"is invalid integer\")",
"def value_to_python(self, value):\n \"\"\"\n Converts the input single value into the expected Python data type,\n raising django.core.exceptions.ValidationError if the data can't be\n converted. Returns the converted value. Subclasses should override\n this.\n \"\"\"\n if not isinstance(value, bytes):\n raise tldap.exceptions.ValidationError(\"should be a bytes\")\n\n length = len(value) - 8\n if length % 4 != 0:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n length = length // 4\n\n array = struct.unpack('<bbbbbbbb' + 'I' * length, value)\n\n if array[1] != length:\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n if array[2:7] != (0, 0, 0, 0, 0):\n raise tldap.exceptions.ValidationError(\"Invalid sid\")\n\n array = (\"S\", ) + array[0:1] + array[7:]\n return \"-\".join([str(i) for i in array])",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if isinstance(value, dict):\n return value\n\n if self.blank and not value:\n return None\n\n if isinstance(value, string_types):\n try:\n return json.loads(value)\n except Exception as e:\n raise ValidationError(str(e))\n\n return value",
"def to_python(self, value):\n \"\"\"\n Convert the input JSON value into python structures, raises\n django.core.exceptions.ValidationError if the data can't be converted.\n \"\"\"\n if self.blank and not value:\n return {}\n value = value or '{}'\n if isinstance(value, six.binary_type):\n value = six.text_type(value, 'utf-8')\n if isinstance(value, six.string_types):\n try:\n # with django 1.6 i have '\"{}\"' as default value here\n if value[0] == value[-1] == '\"':\n value = value[1:-1]\n\n return json.loads(value)\n except Exception as err:\n raise ValidationError(str(err))\n else:\n return value",
"def to_internal_value(self, data):\n \"\"\"\n Dicts of native values <- Dicts of primitive datatypes.\n \"\"\"\n\n if html.is_html_input(data):\n data = html.parse_html_dict(data)\n if not isinstance(data, dict):\n self.fail('not_a_dict', input_type=type(data).__name__)\n if not self.allow_empty and len(data.keys()) == 0:\n message = self.error_messages['empty']\n raise ValidationError({\n api_settings.NON_FIELD_ERRORS_KEY: [message]\n })\n return {\n six.text_type(key): self.child.run_validation(value)\n for key, value in data.items()\n }",
"def to_python(self, value):\n \"\"\"\n \"Called during deserialization and during the clean() method used\n from forms.... [s]hould deal gracefully with... (*) an instance of\n the correct type; (*) a string; (*) None (if the field allows\n null=True).\"\n\n \"For ``to_python()``, if anything goes wrong during value conversion,\n you should raise a ``ValidationError`` exception.\"\n \"\"\"\n if value is None:\n return value\n if not isinstance(value, str):\n return value\n try:\n return json_decode(value)\n except Exception as err:\n raise ValidationError(repr(err))"
] |
[
0.8518143892288208,
0.8449154496192932,
0.838757336139679,
0.8376913070678711,
0.8368150591850281,
0.8314429521560669,
0.8277285695075989,
0.8108909726142883,
0.8054237365722656,
0.7992805242538452,
0.782760739326477,
0.76715487241745
] |
Get data for this component
|
def get(self, id):
"""Get data for this component
"""
id = self.as_id(id)
url = '%s/%s' % (self, id)
response = self.http.get(url, auth=self.auth)
response.raise_for_status()
return response.json()
|
[
"public Object doGetData()\n {\n String data = (String)super.doGetData();\n FileListener listener = this.getRecord().getListener(PropertiesStringFileListener.class);\n if (this.getComponent(0) == null) // Don't convert if this is linked to a screen\n if (enableConversion)\n if (listener != null)\n if (listener.isEnabled())\n data = Utility.replaceResources(data, null, null, this.getRecord().getRecordOwner(), true);\n return data;\n }",
"def get_info(self, component):\n \"\"\" Get the information about this component \"\"\"\n\n # Grab it, clean it and ship it\n work_results = self._get_work_results('info', component)\n return self.data_store.clean_for_serialization(work_results)",
"def get_shared_data(component):\n \"\"\"\n Returns the actual list of component data based on how data is\n stored in component, either from the `data` attribute or from the\n `data['content']` attribute.\n\n Returns:\n list: List of component data.\n \"\"\"\n if component:\n return (copy.deepcopy(component.data)\n if 'content' not in component.data\n else copy.deepcopy(component.data['content']))\n else:\n return []",
"public static function componentDataDefinition() {\n $data_definition = parent::componentDataDefinition();\n\n // Set up the entity type functionality preset options.\n $data_definition['functionality']['presets'] = [\n 'fieldable' => [\n 'label' => 'Fieldable - allows custom fields',\n // TODO: Not supported yet; will work on 3.3.x.\n 'description' => \"Whether this entity type allows custom fields.\",\n // No actual data, as the field_ui_base_route depends on whether this\n // is a bundle entity!\n // TODO: this would work if bundle entity is a subclass generator.\n 'data' => [\n ],\n ],\n 'revisionable' => [\n 'label' => 'Revisionable - entities can have multiple revisions',\n 'data' => [\n 'force' => [\n 'entity_keys' => [\n 'value' => [\n 'revision' => 'revision_id',\n ],\n ],\n ],\n ],\n ],\n 'translatable' => [\n 'label' => 'Translatable - entities can be translated',\n 'data' => [\n 'force' => [\n 'entity_keys' => [\n 'value' => [\n 'langcode' => 'langcode',\n ],\n ],\n ],\n ],\n ],\n 'changed' => [\n 'label' => \"Changed - entities store a timetamp for their last change; implement EntityChangedInterface\",\n 'data' => [\n 'force' => [\n 'interface_parents' => [\n 'value' => ['\\Drupal\\Core\\Entity\\EntityChangedInterface'],\n ],\n 'traits' => [\n 'value' => ['\\Drupal\\Core\\Entity\\EntityChangedTrait'],\n ],\n ],\n ],\n ],\n 'owner' => [\n 'label' => \"Owner - entities each have an owner; implement EntityOwnerInterface\",\n 'data' => [\n 'force' => [\n 'interface_parents' => [\n 'value' => ['\\Drupal\\user\\EntityOwnerInterface'],\n ],\n 'entity_keys' => [\n 'value' => [\n 'uid' => 'uid',\n ],\n ],\n // TODO: handle base field here?\n // TODO: handle faffy callback method?\n ],\n ],\n ],\n 'published' => [\n 'label' => \"Published - entities have a field indicating whether they are published or not; implement EntityPublishedInterface\",\n 'data' => [\n 'force' => [\n 'interface_parents' => [\n 'value' => ['\\Drupal\\Core\\Entity\\EntityPublishedInterface'],\n ],\n 'traits' => [\n 'value' => ['\\Drupal\\Core\\Entity\\EntityPublishedTrait'],\n ],\n 'entity_keys' => [\n 'value' => [\n 'published' => 'status',\n ],\n ],\n 'base_fields_helper_methods' => [\n 'value' => ['publishedBaseFieldDefinitions'],\n ],\n ],\n ],\n ],\n ];\n $data_definition['functionality']['default'] = [\n 'fieldable',\n 'revisionable',\n 'translatable',\n ];\n\n $bundle_entity_properties = [\n // Single place to compute a bundle entity type ID. Here rather than in\n // the bundle generator, as this component needs it too.\n // This is always computed, even when there is no bundle entity selected.\n 'bundle_entity_type_id' => [\n 'computed' => TRUE,\n 'default' => function($component_data) {\n if (!empty($component_data['entity_type_id'])) {\n return $component_data['entity_type_id'] . '_type';\n }\n else {\n return '';\n }\n }\n ],\n 'bundle_entity' => [\n 'label' => 'Bundle config entity type',\n 'description' => \"Creates a config entity type which provides the bundles for this entity type. \"\n . \"This is analogous to the Node Type entity type providing bundles for the Node entity type.\",\n 'format' => 'compound',\n 'cardinality' => 1,\n 'component_type' => 'ConfigBundleEntityType',\n 'default' => function($component_data) {\n return [\n 0 => [\n // Default values for the benefit of progressive UIs.\n // The bundle entity type ID defaults to CONTENT_TYPE_type.\n // Note this doesn't work in tests or non-progressive UIs!\n 'entity_type_id' => $component_data['entity_type_id'] . 
'_type',\n 'bundle_of_entity' => $component_data['entity_type_id'],\n ],\n ];\n },\n ],\n 'bundle_label' => [\n 'computed' => TRUE,\n 'default' => function($component_data) {\n // TODO: get the actual value of the entity_type_label property from\n // the bundle entity -- but this is proving rather labyrinthine...\n return CaseString::snake($component_data['bundle_entity_type_id'])->title();\n },\n ],\n 'field_ui_base_route' => [\n 'label' => 'Field UI base route',\n // TODO: expose to UI in 3.3 when we have dynamic defaults.\n // This will then be dependent on the 'fieldable' property.\n 'computed' => TRUE,\n 'default' => function($component_data) {\n if (!in_array('fieldable', $component_data['functionality'])) {\n return NULL;\n }\n\n if (isset($component_data['bundle_entity'][0])) {\n return 'entity.' . $component_data['bundle_entity_type_id'] . '.edit_form';\n }\n else {\n return 'entity.' . $component_data['entity_type_id'] . '.admin_form';\n }\n },\n ],\n ];\n InsertArray::insertAfter($data_definition, 'entity_ui', $bundle_entity_properties);\n\n $base_fields_properties = [\n 'base_fields' => [\n 'label' => 'Base fields',\n 'description' => \"The base fields for this content entity.\",\n 'format' => 'compound',\n // TODO: default, populated by things such as interface choice!\n 'properties' => [\n 'name' => [\n 'label' => 'Field name',\n 'required' => TRUE,\n ],\n 'label' => [\n 'label' => 'Field label',\n 'default' => function($component_data) {\n $entity_type_id = $component_data['name'];\n return CaseString::snake($entity_type_id)->title();\n },\n 'process_default' => TRUE,\n ],\n 'type' => [\n 'label' => 'Field type',\n 'required' => TRUE,\n 'options' => 'ReportFieldTypes:listFieldTypesOptions',\n ],\n // TODO: options for revisionable and translatable in 3.3.x once\n // we have conditional properties.\n ],\n ],\n // Helper methods from traits that baseFieldDefinitions() should call.\n 'base_fields_helper_methods' => [\n 'internal' => TRUE,\n 'format' => 'array',\n 'default' => [],\n ],\n ];\n InsertArray::insertAfter($data_definition, 'interface_parents', $base_fields_properties);\n\n $data_definition['parent_class_name']['default'] = '\\Drupal\\Core\\Entity\\ContentEntityBase';\n $data_definition['interface_parents']['processing'] = function($value, &$component_data, $property_name, &$property_info) {\n array_unshift($value, '\\Drupal\\Core\\Entity\\ContentEntityInterface');\n $component_data[$property_name] = $value;\n };\n\n // Set the computed value for entity keys. This is done in 'processing'\n // rather than 'default' so we can run after the preset values are applied\n // to add defaults and set the ordering.\n $data_definition['entity_keys']['processing'] = function($value, &$component_data, $property_name, &$property_info) {\n $value += [\n 'id' => $component_data['entity_type_id'] . '_id',\n 'label' => 'title',\n 'uuid' => 'uuid',\n ];\n\n if (isset($component_data['bundle_entity'][0])) {\n $value['bundle'] = 'type';\n }\n\n // Apply a standard ordering to the keys.\n $entity_key_ordering = [\n 'id',\n 'label',\n 'uuid',\n 'bundle',\n 'revision',\n 'langcode',\n 'uid',\n 'published',\n ];\n\n $ordered_value = [];\n foreach ($entity_key_ordering as $key) {\n if (isset($value[$key])) {\n $ordered_value[$key] = $value[$key];\n }\n }\n\n $component_data[$property_name] = $ordered_value;\n };\n\n return $data_definition;\n }",
"function Component_get(name) {\n if (this.has(name)) {\n return this.components[name];\n }\n throw new error.DataError({name: name}, 'Component \"' + name + '\" is not registered in system');\n }",
"def data(self):\n \"\"\"returns the reference to the data functions as a class\"\"\"\n if self._resources is None:\n self.__init()\n if \"data\" in self._resources:\n url = self._url + \"/data\"\n return _data.Data(url=url,\n securityHandler=self._securityHandler,\n initialize=True,\n proxy_url=self._proxy_url,\n proxy_port=self._proxy_port)\n else:\n return None",
"def data(self):\n \"\"\"this property just calls ``get_data``\n but here you can serilalize your data or render as html\n these data will be saved to self.cached_content\n also will be accessable from template\n \"\"\"\n if self.is_obsolete():\n self.update_cache(self.get_data())\n return self.cache_data",
"function getComponentData(data) {\n const component = findComponent(data.id);\n component.template = pathTrimStart(path.join(data.path, `${data.id}.html`));\n return component;\n}",
"def data(self, index, role=Qt.DisplayRole):\r\n \"\"\"Cell content\"\"\"\r\n if not index.isValid():\r\n return to_qvariant()\r\n value = self.get_value(index)\r\n if index.column() == 3 and self.remote:\r\n value = value['view']\r\n if index.column() == 3:\r\n display = value_to_display(value, minmax=self.minmax)\r\n else:\r\n if is_type_text_string(value):\r\n display = to_text_string(value, encoding=\"utf-8\")\r\n else:\r\n display = to_text_string(value)\r\n if role == Qt.DisplayRole:\r\n return to_qvariant(display)\r\n elif role == Qt.EditRole:\r\n return to_qvariant(value_to_display(value))\r\n elif role == Qt.TextAlignmentRole:\r\n if index.column() == 3:\r\n if len(display.splitlines()) < 3:\r\n return to_qvariant(int(Qt.AlignLeft|Qt.AlignVCenter))\r\n else:\r\n return to_qvariant(int(Qt.AlignLeft|Qt.AlignTop))\r\n else:\r\n return to_qvariant(int(Qt.AlignLeft|Qt.AlignVCenter))\r\n elif role == Qt.BackgroundColorRole:\r\n return to_qvariant( self.get_bgcolor(index) )\r\n elif role == Qt.FontRole:\r\n return to_qvariant(get_font(font_size_delta=DEFAULT_SMALL_DELTA))\r\n return to_qvariant()",
"function getData()\n\t\t{\n\n\n\t\t var conn = skynet.createConnection({\n \t\t\"uuid\": currentSettings.uuid,\n \t\t\"token\": currentSettings.token,\n \t\t\"server\": currentSettings.server, \n \t\t\"port\": currentSettings.port\n \t\t\t\t});\t\n\t\t\t \n\t\t\t conn.on('ready', function(data){\t\n\n\t\t\t \tconn.on('message', function(message){\n\n \t\t\t\tvar newData = message;\n \t\t\t\tupdateCallback(newData);\n\n \t\t\t\t\t\t });\n\n\t\t\t });\n\t\t\t}",
"def get(self, id=None, **kwargs):\n \"\"\"\n https://docs.cachethq.io/docs/get-components\n https://docs.cachethq.io/docs/get-a-component\n \"\"\"\n if id is not None:\n return self._get('components/%s' % id, data=kwargs)\n elif 'params' in kwargs:\n data = dict(kwargs)\n params = data.pop('params')\n return self._get('components', data=data, params=params)\n else:\n return self._get('components', data=kwargs)",
"public Object getData() \n {\n Object objKey = this.getField().getData();\n Object objValue = null;\n if (this.isCacheValue(objKey))\n objValue = this.getCacheValue(objKey);\n else\n {\n objValue = super.getData();\n this.cacheValue(objKey, objValue);\n }\n return objValue;\n }"
] |
[
0.7479872703552246,
0.7313993573188782,
0.715919017791748,
0.7088156342506409,
0.7082095742225647,
0.7074160575866699,
0.7063506245613098,
0.7022608518600464,
0.7014139890670776,
0.7011657953262329,
0.6999324560165405,
0.6983368992805481
] |
Create a new component
|
def create(self, data):
"""Create a new component
"""
response = self.http.post(str(self), json=data, auth=self.auth)
response.raise_for_status()
return response.json()
|
[
"def create(self, name, status, description=\"\", link=\"\", order=0,\n group_id=0, enabled=True):\n \"\"\"Create a new component\n\n :param str name: Name of the component\n :param int status: Status of the component; 1-4\n :param str description: Description of the component (optional)\n :param str link: A hyperlink to the component (optional)\n :param int order: Order of the component (optional)\n :param int group_id: The group ID of the component (optional)\n :param bool enabled: Whether the component is enabled (optional)\n :return: Created component data (:class:`dict`)\n\n .. seealso:: https://docs.cachethq.io/reference#components\n .. seealso:: https://docs.cachethq.io/docs/component-statuses\n \"\"\"\n data = ApiParams()\n data['name'] = name\n data['status'] = status\n data['description'] = description\n data['link'] = link\n data['order'] = order\n data['group_id'] = group_id\n data['enabled'] = enabled\n return self._post('components', data=data)['data']",
"def create(self, component_context, overriding_args):\n \"\"\"\n Creates a new instance of the component, respecting the scope.\n :param component_context: The context to resolve dependencies from.\n :param overriding_args: Overriding arguments to use (by name) instead of resolving them.\n :return: An instance of the component.\n \"\"\"\n return self.component_scope.instance(lambda: self._create(component_context, overriding_args))",
"def addcomponent(self, data):\n \"\"\"\n A method to create a component in Bugzilla. Takes a dict, with the\n following elements:\n\n product: The product to create the component in\n component: The name of the component to create\n description: A one sentence summary of the component\n default_assignee: The bugzilla login (email address) of the initial\n owner of the component\n default_qa_contact (optional): The bugzilla login of the\n initial QA contact\n default_cc: (optional) The initial list of users to be CC'ed on\n new bugs for the component.\n is_active: (optional) If False, the component is hidden from\n the component list when filing new bugs.\n \"\"\"\n data = data.copy()\n self._component_data_convert(data)\n return self._proxy.Component.create(data)",
"def create_component(self,\n name,\n project,\n description=None,\n leadUserName=None,\n assigneeType=None,\n isAssigneeTypeValid=False,\n ):\n \"\"\"Create a component inside a project and return a Resource for it.\n\n :param name: name of the component\n :type name: str\n :param project: key of the project to create the component in\n :type project: str\n :param description: a description of the component\n :type description: str\n :param leadUserName: the username of the user responsible for this component\n :type leadUserName: Optional[str]\n :param assigneeType: see the ComponentBean.AssigneeType class for valid values\n :type assigneeType: Optional[str]\n :param isAssigneeTypeValid: boolean specifying whether the assignee type is acceptable (Default: False)\n :type isAssigneeTypeValid: bool\n :rtype: Component\n \"\"\"\n data = {\n 'name': name,\n 'project': project,\n 'isAssigneeTypeValid': isAssigneeTypeValid}\n if description is not None:\n data['description'] = description\n if leadUserName is not None:\n data['leadUserName'] = leadUserName\n if assigneeType is not None:\n data['assigneeType'] = assigneeType\n\n url = self._get_url('component')\n r = self._session.post(\n url, data=json.dumps(data))\n\n component = Component(self._options, self._session, raw=json_loads(r))\n return component",
"function (id) {\n var uninit = [], c = 0, ul, //array of components to init\n i = 0, l, comps, comp;\n\n //add multiple arguments\n if (arguments.length > 1) {\n l = arguments.length;\n for (; i < l; i++) {\n uninit.push(arguments[i]);\n }\n //split components if contains comma\n } else if (id.indexOf(',') !== -1) {\n comps = id.split(rlist);\n l = comps.length;\n for (; i < l; i++) {\n uninit.push(comps[i]);\n }\n //single component passed\n } else {\n uninit.push(id);\n }\n\n //extend the components\n ul = uninit.length;\n for (; c < ul; c++) {\n if (this.__c[uninit[c]] == true)\n continue\n this.__c[uninit[c]] = true\n comp = components[uninit[c]];\n this.extend(comp);\n //if constructor, call it\n if (comp && \"init\" in comp) {\n comp.init.call(this);\n }\n }\n\n this.trigger(\"NewComponent\", uninit);\n return this;\n }",
"function (id)\n\t\t{\n\t\t\tif (_components[id] != undefined) \n\t\t\t\treturn _components[id];\n\t\t\telse if (this.hasComponent('classBuilder'))\n\t\t\t{\n\t\t\t\tvar config = _componentsConfig[id] || {},\n\t\t\t\t\tclassName = config.class || id;\n\t\t\t\tif (this.getComponent('classBuilder').exists(className))\n\t\t\t\t{\n\t\t\t\t\tconfig.id = id;\n\t\t\t\t\tconfig.autoInit = false;\n\t\t\t\t\tvar component = this.createComponent(className,config);\n\t\t\t\t\tif (config.setParent)\n\t\t\t\t\t\tcomponent.setParent(self);\n\t\t\t\t\tself.e.loadComponent(e,id,component);\n\t\t\t\t\t(!config.autoInit)&&component.init(config);\n\t\t\t\t\t_components[id] = component;\n\t\t\t\t\tif (typeof config.alias == 'string')\n\t\t\t\t\t\tthis[config.alias] = _components[id]\n\t\t\t\t\treturn _components[id];\n\t\t\t\t} else\n\t\t\t\t\treturn false;\n\t\t\t}\n\t\t}",
"function createComponentFactory(selector, componentType, viewDefFactory, inputs, outputs, ngContentSelectors) {\n return new ComponentFactory_(selector, componentType, viewDefFactory, inputs, outputs, ngContentSelectors);\n}",
"function _create (state, vel) {\n let comp = vel._comp\n console.assert(!comp, 'Component instance should not exist when this method is used.')\n let parent = vel.parent._comp\n // making sure the parent components have been instantiated\n if (!parent) {\n parent = _create(state, vel.parent)\n }\n // TODO: probably we should do something with forwarded/forwarding components here?\n if (vel._isVirtualComponent) {\n console.assert(parent, 'A Component should have a parent.')\n comp = state.componentFactory.createComponent(vel.ComponentClass, parent, vel.props)\n // HACK: making sure that we have the right props\n // TODO: instead of HACK add an assertion, and make otherwise sure that vel.props is set correctly\n vel.props = comp.props\n if (vel._forwardedEl) {\n let forwardedEl = vel._forwardedEl\n let forwardedComp = state.componentFactory.createComponent(forwardedEl.ComponentClass, comp, forwardedEl.props)\n // HACK same as before\n forwardedEl.props = forwardedComp.props\n comp._forwardedComp = forwardedComp\n }\n } else if (vel._isVirtualHTMLElement) {\n comp = state.componentFactory.createElementComponent(parent, vel)\n } else if (vel._isVirtualTextNode) {\n comp = state.componentFactory.createTextNodeComponent(parent, vel)\n }\n if (vel._ref) {\n comp._ref = vel._ref\n }\n if (vel._owner) {\n comp._owner = vel._owner._comp\n }\n vel._comp = comp\n return comp\n}",
"def create(output_dir):\n \"\"\"Create a new collector or actor\"\"\"\n template_path = os.path.join(os.path.dirname(__file__), 'project_template')\n\n click.secho('Let\\'s create a new component!', fg='green')\n name = click.prompt('What is the name of this component (ex. python-pip)?')\n\n click.secho('')\n click.secho('We assume this will be pushed to GitHub and Docker Hub eventually, but these don\\'t have to exist yet.', fg='green')\n repo_owner = click.prompt('GitHub repo owner (i.e. your username or organization name)')\n repo_name = click.prompt('GitHub repo name', default=name)\n dockerhub_owner = click.prompt('Docker Hub repo owner', default=repo_owner)\n dockerhub_name = click.prompt('Docker Hub repo name', default=repo_name)\n\n license_owner = click.prompt('Who should be the copyright owner on project?', default=repo_owner)\n\n extra_context = {\n 'name': name,\n 'name_shields_io': name.replace('-', '--'),\n 'current_year': datetime.datetime.now().year,\n 'dependencies_cli_version': __version__,\n 'repo_owner': repo_owner,\n 'repo_name': repo_name,\n 'dockerhub_owner': dockerhub_owner,\n 'dockerhub_name': dockerhub_name,\n 'license_owner': license_owner,\n }\n project_dir = cookiecutter(template_path, no_input=True, extra_context=extra_context, output_dir=output_dir)\n\n click.secho('')\n click.secho('{name} is ready to go, `cd {project_dir}` and try running `dependencies test`!'.format(name=name, project_dir=project_dir), fg='green')\n click.secho(\n 'We started you out with a fully functioning component based in python.\\n' +\n 'Once you\\'ve got a handle on how it works then you can change it to whatever language you want.'\n )",
"def postComponents(self, name, status, **kwargs):\n '''Create a new component.\n\n :param name: Name of the component\n :param status: Status of the component; 1-4\n :param description: (optional) Description of the component\n :param link: (optional) A hyperlink to the component\n :param order: (optional) Order of the component\n :param group_id: (optional) The group id that the component is within\n :param enabled: (optional)\n :return: :class:`Response <Response>` object\n :rtype: requests.Response\n '''\n\n kwargs['name'] = name\n kwargs['status'] = status\n return self.__postRequest('/components', kwargs)",
"function Component(parent, context, id, scope) {\n this.parent = parent;\n this.context = context;\n this.id = id;\n this._scope = scope;\n }",
"function(id) {\n var comps,\n compName,\n comp,\n c = 0;\n\n //add multiple arguments\n if (arguments.length === 1 && id.indexOf(\",\") !== -1) {\n comps = id.split(rlist);\n } else {\n comps = arguments;\n }\n\n //extend the components\n for (; c < comps.length; c++) {\n compName = comps[c];\n\n // If component already exists, continue\n if (this.__c[compName] === true) {\n continue;\n }\n this.__c[compName] = true;\n // update map from component to (entityId -> entity)\n (compEntities[compName] = compEntities[compName] || {})[\n this[0]\n ] = this;\n\n comp = components[compName];\n // Copy all methods of the component\n this.extend(comp);\n // Add any required components\n if (comp && \"required\" in comp) {\n this.requires(comp.required);\n }\n // Define properties\n if (comp && \"properties\" in comp) {\n var props = comp.properties;\n for (var propertyName in props) {\n Object.defineProperty(\n this,\n propertyName,\n props[propertyName]\n );\n }\n }\n // Bind events\n if (comp && \"events\" in comp) {\n var auto = comp.events;\n for (var eventName in auto) {\n var fn =\n typeof auto[eventName] === \"function\"\n ? auto[eventName]\n : comp[auto[eventName]];\n this.bind(eventName, fn);\n }\n }\n // Call constructor function\n if (comp && \"init\" in comp) {\n comp.init.call(this);\n }\n }\n\n this.trigger(\"NewComponent\", comps);\n return this;\n }"
] |
[
0.8072904944419861,
0.7723421454429626,
0.7717704772949219,
0.7657792568206787,
0.7609652876853943,
0.7576033473014832,
0.7565774917602539,
0.7495549321174622,
0.7473636269569397,
0.7469908595085144,
0.7466772794723511,
0.7457039952278137
] |
Update a component
|
def update(self, id, data):
"""Update a component
"""
id = self.as_id(id)
response = self.http.patch(
'%s/%s' % (self, id), json=data, auth=self.auth
)
response.raise_for_status()
return response.json()
|
[
"def update(self, component_id, name=None, status=None, description=None,\n link=None, order=None, group_id=None, enabled=True):\n \"\"\"Update a component\n\n :param int component_id: Component ID\n :param str name: Name of the component (optional)\n :param int status: Status of the component; 1-4\n :param str description: Description of the component (optional)\n :param str link: A hyperlink to the component (optional)\n :param int order: Order of the component (optional)\n :param int group_id: The group ID of the component (optional)\n :param bool enabled: Whether the component is enabled (optional)\n :return: Updated component data (:class:`dict`)\n\n .. seealso:: https://docs.cachethq.io/reference#components\n .. seealso:: https://docs.cachethq.io/docs/component-statuses\n \"\"\"\n data = ApiParams()\n data['component'] = component_id\n data['name'] = name\n data['status'] = status\n data['description'] = description\n data['link'] = link\n data['order'] = order\n data['group_id'] = group_id\n data['enabled'] = enabled\n return self._put('components/%s' % component_id, data=data)['data']",
"def update(context, id, export_control, active):\n \"\"\"update(context, id, export_control, active)\n\n Update a component\n\n >>> dcictl component-update [OPTIONS]\n\n :param string id: ID of the component [required]\n :param boolean export-control: Set the component visible for users\n :param boolean active: Set the component in the active state\n \"\"\"\n\n component_info = component.get(context, id=id)\n\n etag = component_info.json()['component']['etag']\n\n result = component.update(context, id=id, etag=etag,\n export_control=export_control,\n state=utils.active_string(active))\n\n utils.format_output(result, context.format)",
"def update_component(name, comp, component_dict):\n \"\"\"Get a component from a component dict.\n\n \"\"\"\n for dia in component_dict.get('dialogues', ()):\n try:\n comp.add_dialogue(*_get_pair(dia))\n except Exception as e:\n msg = 'In device %s, malformed dialogue %s\\n%r'\n raise Exception(msg % (name, dia, e))\n\n for prop_name, prop_dict in component_dict.get('properties', {}).items():\n try:\n getter = (_get_pair(prop_dict['getter'])\n if 'getter' in prop_dict else None)\n setter = (_get_triplet(prop_dict['setter'])\n if 'setter' in prop_dict else None)\n comp.add_property(prop_name, prop_dict.get('default', ''),\n getter, setter, prop_dict.get('specs', {}))\n except Exception as e:\n msg = 'In device %s, malformed property %s\\n%r'\n raise type(e)(msg % (name, prop_name, format_exc()))",
"def editcomponent(self, data):\n \"\"\"\n A method to edit a component in Bugzilla. Takes a dict, with\n mandatory elements of product. component, and initialowner.\n All other elements are optional and use the same names as the\n addcomponent() method.\n \"\"\"\n data = data.copy()\n self._component_data_convert(data, update=True)\n return self._proxy.Component.update(data)",
"protected function updateComponent()\n {\n $this->filesystem->delete(resource_path('assets/js/components/Example.vue'));\n $this->filesystem->copy($this->stubPath('/Example.js'), resource_path('assets/js/components/Example.js'));\n }",
"function (transaction, prevParentElement, nextParentElement, prevUnmaskedContext, nextUnmaskedContext) {\n\t var inst = this._instance;\n\t !(inst != null) ? false ? invariant(false, 'Attempted to update component `%s` that has already been unmounted (or failed to mount).', this.getName() || 'ReactCompositeComponent') : _prodInvariant('136', this.getName() || 'ReactCompositeComponent') : void 0;\n\t\n\t var willReceive = false;\n\t var nextContext;\n\t\n\t // Determine if the context has changed or not\n\t if (this._context === nextUnmaskedContext) {\n\t nextContext = inst.context;\n\t } else {\n\t nextContext = this._processContext(nextUnmaskedContext);\n\t willReceive = true;\n\t }\n\t\n\t var prevProps = prevParentElement.props;\n\t var nextProps = nextParentElement.props;\n\t\n\t // Not a simple state update but a props update\n\t if (prevParentElement !== nextParentElement) {\n\t willReceive = true;\n\t }\n\t\n\t // An update here will schedule an update but immediately set\n\t // _pendingStateQueue which will ensure that any state updates gets\n\t // immediately reconciled instead of waiting for the next batch.\n\t if (willReceive && inst.componentWillReceiveProps) {\n\t if (false) {\n\t measureLifeCyclePerf(function () {\n\t return inst.componentWillReceiveProps(nextProps, nextContext);\n\t }, this._debugID, 'componentWillReceiveProps');\n\t } else {\n\t inst.componentWillReceiveProps(nextProps, nextContext);\n\t }\n\t }\n\t\n\t var nextState = this._processPendingState(nextProps, nextContext);\n\t var shouldUpdate = true;\n\t\n\t if (!this._pendingForceUpdate) {\n\t if (inst.shouldComponentUpdate) {\n\t if (false) {\n\t shouldUpdate = measureLifeCyclePerf(function () {\n\t return inst.shouldComponentUpdate(nextProps, nextState, nextContext);\n\t }, this._debugID, 'shouldComponentUpdate');\n\t } else {\n\t shouldUpdate = inst.shouldComponentUpdate(nextProps, nextState, nextContext);\n\t }\n\t } else {\n\t if (this._compositeType === CompositeTypes.PureClass) {\n\t shouldUpdate = !shallowEqual(prevProps, nextProps) || !shallowEqual(inst.state, nextState);\n\t }\n\t }\n\t }\n\t\n\t if (false) {\n\t process.env.NODE_ENV !== 'production' ? warning(shouldUpdate !== undefined, '%s.shouldComponentUpdate(): Returned undefined instead of a ' + 'boolean value. Make sure to return true or false.', this.getName() || 'ReactCompositeComponent') : void 0;\n\t }\n\t\n\t this._updateBatchNumber = null;\n\t if (shouldUpdate) {\n\t this._pendingForceUpdate = false;\n\t // Will set `this.props`, `this.state` and `this.context`.\n\t this._performComponentUpdate(nextParentElement, nextProps, nextState, nextContext, transaction, nextUnmaskedContext);\n\t } else {\n\t // If it's determined that a component should not update, we still want\n\t // to set props and state but we shortcut the rest of the update.\n\t this._currentElement = nextParentElement;\n\t this._context = nextUnmaskedContext;\n\t inst.props = nextProps;\n\t inst.state = nextState;\n\t inst.context = nextContext;\n\t }\n\t }",
"function updateComponent(transaction, prevParentElement, nextParentElement, prevUnmaskedContext, nextUnmaskedContext) {\n var inst = this._instance;\n var willReceive = false;\n var nextContext;\n var nextProps;\n\n // Determine if the context has changed or not\n if (this._context === nextUnmaskedContext) {\n nextContext = inst.context;\n } else {\n nextContext = this._processContext(nextUnmaskedContext);\n willReceive = true;\n }\n\n // Distinguish between a props update versus a simple state update\n if (prevParentElement === nextParentElement) {\n // Skip checking prop types again -- we don't read inst.props to avoid\n // warning for DOM component props in this upgrade\n nextProps = nextParentElement.props;\n } else {\n nextProps = this._processProps(nextParentElement.props);\n willReceive = true;\n }\n\n // An update here will schedule an update but immediately set\n // _pendingStateQueue which will ensure that any state updates gets\n // immediately reconciled instead of waiting for the next batch.\n if (willReceive && inst.componentWillReceiveProps) {\n inst.componentWillReceiveProps(nextProps, nextContext);\n }\n\n var nextState = _processPendingState.call(this, nextProps, nextContext);\n\n var shouldUpdate = this._pendingForceUpdate || !inst.shouldComponentUpdate || inst.shouldComponentUpdate(nextProps, nextState, nextContext);\n\n // if (process.env.NODE_ENV !== 'production') {\n // process.env.NODE_ENV !== 'production' ? warning(shouldUpdate !== undefined, '%s.shouldComponentUpdate(): Returned undefined instead of a ' + 'boolean value. Make sure to return true or false.', this.getName() || 'ReactCompositeComponent') : void 0;\n // }\n\n if (shouldUpdate) {\n this._pendingForceUpdate = false;\n // Will set `this.props`, `this.state` and `this.context`.\n _performComponentUpdate.call(this, nextParentElement, nextProps, nextState, nextContext, transaction, nextUnmaskedContext);\n } else {\n // If it's determined that a component should not update, we still want\n // to set props and state but we shortcut the rest of the update.\n this._currentElement = nextParentElement;\n this._context = nextUnmaskedContext;\n inst.props = nextProps;\n inst.state = nextState;\n inst.context = nextContext;\n }\n}",
"function updateVirtualComponent (vnode) {\n var vm = this;\n var componentId = vm.$options.componentId;\n if (vm._isMounted) {\n callHook(vm, 'beforeUpdate');\n }\n vm._vnode = vnode;\n if (vm._isMounted && componentId) {\n // TODO: data should be filtered and without bindings\n var data = Object.assign({}, vm._data);\n updateComponentData(componentId, data, function () {\n callHook(vm, 'updated');\n });\n }\n}",
"private void doUpdateComponent() throws PageException {\n\n\tadmin.updateComponentDeepSearch(getBoolObject(\"admin\", action, \"deepSearch\"));\n\tadmin.updateBaseComponent(getString(\"admin\", action, \"baseComponentTemplateCFML\"), getString(\"admin\", action, \"baseComponentTemplateLucee\"));\n\tadmin.updateComponentDumpTemplate(getString(\"admin\", action, \"componentDumpTemplate\"));\n\tadmin.updateComponentDataMemberDefaultAccess(getString(\"admin\", action, \"componentDataMemberDefaultAccess\"));\n\tadmin.updateTriggerDataMember(getBoolObject(\"admin\", action, \"triggerDataMember\"));\n\tadmin.updateComponentUseShadow(getBoolObject(\"admin\", action, \"useShadow\"));\n\tadmin.updateComponentDefaultImport(getString(\"admin\", action, \"componentDefaultImport\"));\n\tadmin.updateComponentLocalSearch(getBoolObject(\"admin\", action, \"componentLocalSearch\"));\n\tadmin.updateComponentPathCache(getBoolObject(\"admin\", action, \"componentPathCache\"));\n\tstore();\n\tadminSync.broadcast(attributes, config);\n }",
"function updateComponents (address, node, state, diff, bindings, renderResult,\n relativeAddress, stateCallers, opts) {\n // TODO pull these out to top level\n const updateRecurse = ([ d, s ], k) => {\n // TODO in updateRecurse functions where k can be null, there must be a\n // nicer way to organize things with fewer null checks\n const component = k !== null ? node.component : node\n const newAddress = k !== null ? addressWith(address, k) : address\n const newRelativeAddress = k !== null\n ? addressWith(relativeAddress, k)\n : relativeAddress\n const b = k !== null ? get(bindings, k) : bindings\n\n // Get binding el\n const lastRenderedEl = get(b, 'data')\n const el = renderResult !== null\n ? renderResult[makeBindingKey(newRelativeAddress)]\n : lastRenderedEl\n\n // Update the component. If DESTROY, then there will not be a binding.\n const res = updateEl(newAddress, component, s, d.data, lastRenderedEl, el,\n stateCallers, opts)\n // Fall back on old bindings.\n const nextRenderResult = res.renderResult !== null\n ? res.renderResult\n : null\n // Update children\n const children = updateComponents(newAddress, component.model, s,\n d.children, get(b, 'children'),\n nextRenderResult, [], stateCallers, opts)\n return tagType(NODE, { data: el, children })\n }\n\n // TODO pull these out to top level\n const recurse = (n, k) => {\n return updateComponents(addressWith(address, k), n, get(state, k), diff[k],\n get(bindings, k), renderResult,\n addressWith(relativeAddress, k), stateCallers, opts)\n }\n\n return match(node, match_updateComponents, null, diff, state, updateRecurse,\n recurse)\n}",
"def update(details)\n details.each do |k, v|\n send(\"#{k}=\", v)\n end\n self.class.update(component_key, details)\n end",
"def update(user_name, repo_name, component_id, params={})\n _update_user_repo_params(user_name, repo_name)\n _validate_user_repo_params(user, repo) unless user? && repo?\n _validate_presence_of component_id\n\n normalize! params\n filter! VALID_COMPONENT_INPUTS, params\n assert_required_keys(VALID_COMPONENT_INPUTS, params)\n\n put_request(\"/1.0/repositories/#{user}/#{repo.downcase}/issues/components/#{component_id}\", params)\n end"
] |
[
0.8214623332023621,
0.8128786683082581,
0.7688286304473877,
0.7536182403564453,
0.7502194046974182,
0.7400845885276794,
0.7399318218231201,
0.7325282096862793,
0.7288486957550049,
0.7256565690040588,
0.7254197001457214,
0.7252134680747986
] |
Delete a component by id
|
def delete(self, id):
"""Delete a component by id
"""
id = self.as_id(id)
response = self.http.delete(
'%s/%s' % (self.api_url, id),
auth=self.auth)
response.raise_for_status()
|
[
"def delete_component(self, id):\n \"\"\"Delete component by id.\n\n :param id: ID of the component to use\n :type id: str\n :rtype: Response\n \"\"\"\n url = self._get_url('component/' + str(id))\n return self._session.delete(url)",
"def delete(context, id):\n \"\"\"delete(context, id)\n\n Delete a component.\n\n >>> dcictl component-delete [OPTIONS]\n\n :param string id: ID of the component to delete [required]\n \"\"\"\n result = component.delete(context, id=id)\n if result.status_code == 204:\n utils.print_json({'id': id, 'message': 'Component deleted.'})\n else:\n utils.format_output(result, context.format)",
"function (id, soft) {\n if (soft === false) {\n var props = components[id], prop;\n for (prop in props) {\n delete this[prop];\n }\n }\n delete this.__c[id];\n\n this.trigger(\"RemoveComponent\", id);\n return this;\n }",
"def delete_tag_for_component(user, c_id, tag_id):\n \"\"\"Delete a tag on a specific component.\"\"\"\n # Todo : check c_id and tag_id exist in db\n\n query = _TABLE_TAGS.delete().where(_TABLE_TAGS.c.tag_id == tag_id and\n _TABLE_TAGS.c.component_id == c_id)\n\n try:\n flask.g.db_conn.execute(query)\n except sa_exc.IntegrityError:\n raise dci_exc.DCICreationConflict(_TABLE_TAGS.c.tag_id, 'tag_id')\n\n return flask.Response(None, 204, content_type='application/json')",
"def delete(user_name, repo_name, component_id, params={})\n _update_user_repo_params(user_name, repo_name)\n _validate_user_repo_params(user, repo) unless user? && repo?\n\n _validate_presence_of component_id\n normalize! params\n\n delete_request(\"/1.0/repositories/#{user}/#{repo.downcase}/issues/components/#{component_id}\", params)\n end",
"private void markComponentForDeletion(String id , UIComponent component)\n {\n _componentsMarkedForDeletion.get(_deletionLevel).put(id, component);\n }",
"private UIComponent removeComponentForDeletion(String id)\n {\n UIComponent removedComponent = _componentsMarkedForDeletion.get(_deletionLevel).remove(id); \n if (removedComponent != null && _deletionLevel > 0)\n {\n _componentsMarkedForDeletion.get(_deletionLevel-1).remove(id);\n }\n return removedComponent;\n }",
"function(id, soft) {\n var comp = components[id];\n this.trigger(\"RemoveComponent\", id);\n if (comp && \"events\" in comp) {\n var auto = comp.events;\n for (var eventName in auto) {\n var fn =\n typeof auto[eventName] === \"function\"\n ? auto[eventName]\n : comp[auto[eventName]];\n this.unbind(eventName, fn);\n }\n }\n if (comp && \"remove\" in comp) {\n comp.remove.call(this, false);\n }\n if (soft === false && comp) {\n for (var prop in comp) {\n delete this[prop];\n }\n }\n delete this.__c[id];\n // update map from component to (entityId -> entity)\n if (compEntities[id]) {\n delete compEntities[id][this[0]];\n }\n\n return this;\n }",
"def delete(ctx, componentname):\n \"\"\"Delete an existing component configuration. This will trigger\n the creation of its default configuration upon next restart.\"\"\"\n col = ctx.obj['col']\n\n if col.count({'name': componentname}) > 1:\n log('More than one component configuration of this name! Try '\n 'one of the uuids as argument. Get a list with \"config '\n 'list\"')\n return\n\n log('Deleting component configuration', componentname,\n emitter='MANAGE')\n\n configuration = col.find_one({'name': componentname})\n\n if configuration is None:\n configuration = col.find_one({'uuid': componentname})\n\n if configuration is None:\n log('Component configuration not found:', componentname,\n emitter='MANAGE')\n return\n\n configuration.delete()\n log('Done')",
"def delete(self, id: int):\n \"\"\" Delete asset class \"\"\"\n assert isinstance(id, int)\n\n self.open_session()\n to_delete = self.get(id)\n self.session.delete(to_delete)\n self.save()",
"def file_delete(context, id, file_id):\n \"\"\"file_delete(context, id, path)\n\n Delete a component file\n\n >>> dcictl component-file-delete [OPTIONS]\n\n :param string id: ID of the component to delete file [required]\n :param string file_id: ID for the file to delete [required]\n \"\"\"\n component.file_delete(context, id=id, file_id=file_id)",
"public QYResultType delete(Integer id){\n BeanUtil.requireNonNull(id, \"id is null\");\n String url = BASE_API_URL + \"cgi-bin/department/delete?access_token=#&id=\" + id;\n BaseResponse r = executeGet(url);\n return QYResultType.get(r.getErrcode());\n }"
] |
[
0.8823086619377136,
0.8571924567222595,
0.7978848814964294,
0.789355456829071,
0.7869218587875366,
0.7781111001968384,
0.766715407371521,
0.7617802023887634,
0.7290953993797302,
0.722851037979126,
0.7205148935317993,
0.7175469398498535
] |
Get a list of this github component
:param url: full url
:param Comp: a :class:`.Component` class
:param callback: Optional callback
:param limit: Optional number of items to retrieve
:param data: additional query data
:return: a list of ``Comp`` objects with data
|
def get_list(self, url=None, callback=None, limit=100, **data):
"""Get a list of this github component
:param url: full url
:param Comp: a :class:`.Component` class
:param callback: Optional callback
:param limit: Optional number of items to retrieve
:param data: additional query data
:return: a list of ``Comp`` objects with data
"""
url = url or str(self)
data = dict(((k, v) for k, v in data.items() if v))
all_data = []
if limit:
data['per_page'] = min(limit, 100)
while url:
response = self.http.get(url, params=data, auth=self.auth)
response.raise_for_status()
result = response.json()
n = m = len(result)
if callback:
result = callback(result)
m = len(result)
all_data.extend(result)
if limit and len(all_data) > limit:
all_data = all_data[:limit]
break
elif m == n:
data = None
next = response.links.get('next', {})
url = next.get('url')
else:
break
return all_data
|
[
"def comp(request, slug, directory_slug=None):\n \"\"\"\n View the requested comp\n \"\"\"\n context = {}\n path = settings.COMPS_DIR\n comp_dir = os.path.split(path)[1]\n template = \"{0}/{1}\".format(comp_dir, slug)\n if directory_slug:\n template = \"{0}/{1}/{2}\".format(comp_dir, directory_slug, slug)\n working_dir = os.path.join(path, slug)\n if os.path.isdir(working_dir):\n return redirect('comp-listing', directory_slug=slug)\n\n try:\n t = get_template(template)\n except TemplateDoesNotExist:\n return redirect('comp-listing')\n\n c = RequestContext(request, context)\n return HttpResponse(t.render(c))",
"def info(self, url, limit=None):\n \"\"\"GETs \"info\" about ``url``. See https://github.com/reddit/reddit/wiki/API%3A-info.json.\n \n URL: ``http://www.reddit.com/api/info/?url=<url>``\n \n :param url: url\n :param limit: max number of links to get\n \"\"\"\n return self._limit_get('api', 'info', params=dict(url=url), limit=limit)",
"def search(self, query, limit=None):\n \"\"\"Use reddit's search function. Returns :class:`things.Listing` object.\n \n URL: ``http://www.reddit.com/search/?q=<query>&limit=<limit>``\n \n :param query: query string\n :param limit: max number of results to get\n \"\"\"\n return self._limit_get('search', params=dict(q=query), limit=limit)",
"def get_resource_listing(url, offset, limit, properties):\n \"\"\"Gneric method to retrieve a resource listing from a SCO-API. Takes the\n resource-specific API listing Url as argument.\n\n Parameters\n ----------\n url : string\n Resource listing Url for a SCO-API\n offset : int, optional\n Starting offset for returned list items\n limit : int, optional\n Limit the number of items in the result\n properties : List(string)\n List of additional object properties to be included for items in\n the result\n\n Returns\n -------\n List(ResourceHandle)\n List of resource handle (one per subject in the object listing)\n \"\"\"\n # Create listing query based on given arguments\n query = [\n QPARA_OFFSET + '=' + str(offset),\n QPARA_LIMIT + '=' + str(limit)\n ]\n # Add properties argument if property list is not None and not empty\n if not properties is None:\n if len(properties) > 0:\n query.append(QPARA_ATTRIBUTES + '=' + ','.join(properties))\n # Add query to Url.\n url = url + '?' + '&'.join(query)\n # Get subject listing Url for given SCO-API and decorate it with\n # given listing arguments. Then retrieve listing from SCO-API.\n json_obj = JsonResource(url).json\n # Convert result into a list of resource handles and return the result\n resources = []\n for element in json_obj['items']:\n resource = ResourceHandle(element)\n # Add additional properties to resource if list is given\n if not properties is None:\n resource.properties = {}\n for prop in properties:\n if prop in element:\n resource.properties[prop] = element[prop]\n resources.append(resource)\n return resources",
"function list(appName, callback) {\n if (!appName) {\n return callback(ArgumentError('appName'));\n }\n\n var uri = format('/%s/apps/%s/config/', deis.version, appName);\n commons.get(uri, function onListResponse(err, result) {\n console.log(result);\n callback(err, result ? extractLimits(result) : null);\n });\n }",
"def fetch_list_members(list_url):\n \"\"\" Get all members of the list specified by the given url. E.g., https://twitter.com/lore77/lists/libri-cultura-education \"\"\"\n match = re.match(r'.+twitter\\.com\\/(.+)\\/lists\\/(.+)', list_url)\n if not match:\n print('cannot parse list url %s' % list_url)\n return []\n screen_name, slug = match.groups()\n print('collecting list %s/%s' % (screen_name, slug))\n return twutil.collect.list_members(slug, screen_name)",
"def list(self, url):\n \"\"\" List ressources store into LinShare.\"\"\"\n url = self.get_full_url(url)\n self.log.debug(\"list url : \" + url)\n # Building request\n request = urllib2.Request(url)\n request.add_header('Content-Type', 'application/json; charset=UTF-8')\n request.add_header('Accept', 'application/json')\n # Do request\n ret = self.do_request(request)\n self.log.debug(\"\"\"list url : %(url)s : request time : %(time)s\"\"\",\n {\"url\": url,\n \"time\": self.last_req_time})\n return ret",
"def get(self, component_id=None, **kwargs):\n \"\"\"Get components\n\n :param component_id: Component ID (optional)\n :return: Components data (:class:`Generator`)\n\n Additional named arguments may be passed and are directly transmitted\n to API. It is useful to use the API search features.\n\n .. seealso:: https://docs.cachethq.io/reference#get-components\n .. seealso:: https://docs.cachethq.io/docs/advanced-api-usage\n \"\"\"\n path = 'components'\n if component_id is not None:\n path += '/%s' % component_id\n return self.paginate_get(path, data=kwargs)",
"def search_all(self, quota=50, format='json'):\n '''\n Returns a single list containing up to 'limit' Result objects\n Will keep requesting until quota is met\n Will also truncate extra results to return exactly the given quota\n '''\n quota_left = quota\n results = []\n while quota_left > 0:\n more_results = self._search(quota_left, format)\n if not more_results:\n break\n results += more_results\n quota_left = quota_left - len(more_results)\n time.sleep(1)\n results = results[0:quota]\n return results",
"def get_items(self, limit=None, *, query=None, order_by=None, batch=None):\n \"\"\" Returns a collection of drive items from the root folder\n\n :param int limit: max no. of items to get. Over 999 uses batch.\n :param query: applies a OData filter to the request\n :type query: Query or str\n :param order_by: orders the result set based on this condition\n :type order_by: Query or str\n :param int batch: batch size, retrieves items in\n batches allowing to retrieve more items than the limit.\n :return: list of items in this folder\n :rtype: list[DriveItem] or Pagination\n \"\"\"\n\n if self.object_id:\n # reference the current drive_id\n url = self.build_url(\n self._endpoints.get('list_items').format(id=self.object_id))\n else:\n # we don't know the drive_id so go to the default\n url = self.build_url(self._endpoints.get('list_items_default'))\n\n return self._base_get_list(url, limit=limit, query=query,\n order_by=order_by, batch=batch)",
"def list_(consul_url=None, token=None, key=None, **kwargs):\n '''\n List keys in Consul\n\n :param consul_url: The Consul server URL.\n :param key: The key to use as the starting point for the list.\n :return: The list of keys.\n\n CLI Example:\n\n .. code-block:: bash\n\n salt '*' consul.list\n salt '*' consul.list key='web'\n\n '''\n ret = {}\n\n if not consul_url:\n consul_url = _get_config()\n if not consul_url:\n log.error('No Consul URL found.')\n ret['message'] = 'No Consul URL found.'\n ret['res'] = False\n return ret\n\n query_params = {}\n\n if 'recurse' in kwargs:\n query_params['recurse'] = 'True'\n\n # No key so recurse and show all values\n if not key:\n query_params['recurse'] = 'True'\n function = 'kv/'\n else:\n function = 'kv/{0}'.format(key)\n\n query_params['keys'] = 'True'\n query_params['separator'] = '/'\n ret = _query(consul_url=consul_url,\n function=function,\n token=token,\n query_params=query_params)\n return ret",
"def _search(self, limit, format):\n '''\n Returns a list of result objects, with the url for the next page bing search url.\n '''\n url = self.QUERY_URL.format(requests.utils.quote(\"'{}'\".format(self.query)), min(50, limit), self.current_offset, format)\n r = requests.get(url, auth=(\"\", self.api_key))\n try:\n json_results = r.json()\n except ValueError as vE:\n if not self.safe:\n raise PyBingVideoException(\"Request returned with code %s, error msg: %s\" % (r.status_code, r.text))\n else:\n print (\"[ERROR] Request returned with code %s, error msg: %s. \\nContinuing in 5 seconds.\" % (r.status_code, r.text))\n time.sleep(5)\n packaged_results = [VideoResult(single_result_json) for single_result_json in json_results['d']['results']]\n self.current_offset += min(50, limit, len(packaged_results))\n return packaged_results"
] |
[
0.6603466868400574,
0.639123260974884,
0.6311928629875183,
0.6283335089683533,
0.6283116340637207,
0.6262850165367126,
0.6243754625320435,
0.6223114728927612,
0.6206058263778687,
0.6196853518486023,
0.6196420788764954,
0.6195151805877686
] |
Return all comments for this issue/pull request
|
def comments(self, issue):
"""Return all comments for this issue/pull request
"""
commit = self.as_id(issue)
return self.get_list(url='%s/%s/comments' % (self, commit))
|
[
"def getPullRequestComments(self, repo_user, repo_name, pull_number):\n \"\"\"\n GET /repos/:owner/:repo/pulls/:number/comments\n\n :param pull_number: The pull request's number.\n \"\"\"\n return self.api.makeRequestAllPages(\n ['repos', repo_user, repo_name,\n 'pulls', str(pull_number), 'comments'])",
"def comments(self, issue):\n \"\"\"Get a list of comment Resources.\n\n :param issue: the issue to get comments from\n :type issue: str\n :rtype: List[Comment]\n \"\"\"\n r_json = self._get_json('issue/' + str(issue) + '/comment')\n\n comments = [Comment(self._options, self._session, raw_comment_json)\n for raw_comment_json in r_json['comments']]\n return comments",
"def get_pulls_comments(self, sort=github.GithubObject.NotSet, direction=github.GithubObject.NotSet, since=github.GithubObject.NotSet):\n \"\"\"\n :calls: `GET /repos/:owner/:repo/pulls/comments <http://developer.github.com/v3/pulls/comments>`_\n :param sort: string\n :param direction: string\n :param since: datetime.datetime\n :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.PullRequestComment.PullRequestComment`\n \"\"\"\n return self.get_pulls_review_comments(sort, direction, since)",
"def lookup_comment_list(self):\n \"\"\"Lookup list of comments for an issue.\n\n ~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-\n\n :returns: The pair (ISSUE, COMMENTS) where ISSUE is a dict for the\n main issue and COMMENTS is a list of comments on the issue.\n\n ~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-~-\n\n PURPOSE: Do the work of getting data from github, handling paging,\n and so on.\n\n \"\"\"\n if self.thread_id is None:\n return None, None\n\n # Just pulling a single issue here so pagination shouldn't be problem\n my_req = self.raw_pull(self.thread_id)\n if my_req.status_code != 200:\n raise GitHubAngry('Bad status code %s because %s' % (\n my_req.status_code, my_req.reason))\n issue_json = my_req.json()\n comments_url = issue_json['comments_url'] + self.url_extras\n kwargs = {} if not self.user else {'auth': (self.user, self.token)}\n comments_json = []\n while comments_url:\n logging.debug('Pulling comments URL: %s', comments_url)\n c_req = requests.get(comments_url, **kwargs)\n my_json = c_req.json()\n assert isinstance(my_json, list)\n comments_json.extend(my_json)\n comments_url = None\n if 'link' in c_req.headers: # need to handle pagination.\n logging.debug('Paginating in lookup_comment_list')\n link = c_req.headers['link'].split(',')\n for thing in link:\n potential_url, part = thing.split('; ')\n if part == 'rel=\"next\"':\n comments_url = potential_url.lstrip(' <').rstrip('> ')\n\n return issue_json, comments_json",
"def get_issue_comments(issue_number,\n repo_name=None,\n profile='github',\n since=None,\n output='min'):\n '''\n Return information about the comments for a given issue in a named repository.\n\n .. versionadded:: 2016.11.0\n\n issue_number\n The number of the issue for which to retrieve comments.\n\n repo_name\n The name of the repository to which the issue belongs. This argument is\n required, either passed via the CLI, or defined in the configured\n profile. A ``repo_name`` passed as a CLI argument will override the\n repo_name defined in the configured profile, if provided.\n\n profile\n The name of the profile configuration to use. Defaults to ``github``.\n\n since\n Only comments updated at or after this time are returned. This is a\n timestamp in ISO 8601 format: ``YYYY-MM-DDTHH:MM:SSZ``.\n\n output\n The amount of data returned by each issue. Defaults to ``min``. Change\n to ``full`` to see all issue output.\n\n CLI Example:\n\n .. code-block:: bash\n\n salt myminion github.get_issue_comments 514\n salt myminion github.get_issue 514 repo_name=salt\n '''\n org_name = _get_config_value(profile, 'org_name')\n if repo_name is None:\n repo_name = _get_config_value(profile, 'repo_name')\n\n action = '/'.join(['repos', org_name, repo_name])\n command = '/'.join(['issues', six.text_type(issue_number), 'comments'])\n\n args = {}\n if since:\n args['since'] = since\n\n comments = _query(profile, action=action, command=command, args=args)\n\n ret = {}\n for comment in comments:\n comment_id = comment.get('id')\n if output == 'full':\n ret[comment_id] = comment\n else:\n ret[comment_id] = {'id': comment.get('id'),\n 'created_at': comment.get('created_at'),\n 'updated_at': comment.get('updated_at'),\n 'user_login': comment.get('user').get('login')}\n return ret",
"def get_pulls_review_comments(self, sort=github.GithubObject.NotSet, direction=github.GithubObject.NotSet, since=github.GithubObject.NotSet):\n \"\"\"\n :calls: `GET /repos/:owner/:repo/pulls/comments <http://developer.github.com/v3/pulls/comments>`_\n :param sort: string\n :param direction: string\n :param since: datetime.datetime\n :rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.PullRequestComment.PullRequestComment`\n \"\"\"\n assert sort is github.GithubObject.NotSet or isinstance(sort, (str, unicode)), sort\n assert direction is github.GithubObject.NotSet or isinstance(direction, (str, unicode)), direction\n assert since is github.GithubObject.NotSet or isinstance(since, datetime.datetime), since\n url_parameters = dict()\n if sort is not github.GithubObject.NotSet:\n url_parameters[\"sort\"] = sort\n if direction is not github.GithubObject.NotSet:\n url_parameters[\"direction\"] = direction\n if since is not github.GithubObject.NotSet:\n url_parameters[\"since\"] = since.strftime(\"%Y-%m-%dT%H:%M:%SZ\")\n return github.PaginatedList.PaginatedList(\n github.IssueComment.IssueComment,\n self._requester,\n self.url + \"/pulls/comments\",\n url_parameters\n )",
"def comments(self, request_id):\n \"\"\"\n Return comments for request\n \"\"\"\n return self._query_zendesk(self.endpoint.comments, 'comment', id=request_id)",
"def iter_issue_comments(self, number=-1, etag=None):\n \"\"\"Iterate over the issue comments on this pull request.\n\n :param int number: (optional), number of comments to return. Default:\n -1 returns all available comments.\n :param str etag: (optional), ETag from a previous request to the same\n endpoint\n :returns: generator of :class:`IssueComment <IssueComment>`\\ s\n \"\"\"\n url = self._build_url(base_url=self.links['comments'])\n return self._iter(int(number), url, IssueComment, etag=etag)",
"def comments(accountable):\n \"\"\"\n Lists all comments for a given issue key.\n \"\"\"\n comments = accountable.issue_comments()\n headers = sorted(['author_name', 'body', 'updated'])\n\n if comments:\n rows = [[v for k, v in sorted(c.items()) if k in headers]\n for c in comments]\n rows.insert(0, headers)\n print_table(SingleTable(rows))\n else:\n click.secho('No comments found for {}'.format(\n accountable.issue_key\n ), fg='red')",
"def comments(self, extra_params=None):\n \"\"\"\n All Comments in this Ticket\n \"\"\"\n\n # Default params\n params = {\n 'per_page': settings.MAX_PER_PAGE,\n }\n\n if extra_params:\n params.update(extra_params)\n\n return self.api._get_json(\n TicketComment,\n space=self,\n rel_path=self.space._build_rel_path(\n 'tickets/%s/ticket_comments' % self['number']\n ),\n extra_params=params,\n get_all=True, # Retrieve all comments in the ticket\n )",
"def iter_comments(self, number=-1, etag=None):\n \"\"\"Iterate over the comments on this pull request.\n\n :param int number: (optional), number of comments to return. Default:\n -1 returns all available comments.\n :param str etag: (optional), ETag from a previous request to the same\n endpoint\n :returns: generator of :class:`ReviewComment <ReviewComment>`\\ s\n \"\"\"\n url = self._build_url('comments', base_url=self._api)\n return self._iter(int(number), url, ReviewComment, etag=etag)",
"def get_comments(self, endpoint=\"deviation\", deviationid=\"\", commentid=\"\", username=\"\", statusid=\"\", ext_item=False, offset=0, limit=10, maxdepth=0):\n\n \"\"\"Fetch comments\n\n :param endpoint: The source/endpoint you want to fetch comments from (deviation/profile/status/siblings)\n :param deviationid: The deviationid you want to fetch\n :param commentid: The commentid you want to fetch\n :param username: The username you want to get a list of status updates from\n :param statusid: The statusid you want to fetch\n :param ext_item: the pagination limit\n :param offset: the pagination offset\n :param limit: the pagination limit\n :param maxdepth: Depth to query replies until\n \"\"\"\n\n if endpoint == \"deviation\":\n if deviationid:\n response = self._req('/comments/{}/{}'.format(endpoint, deviationid), {\n \"commentid\" : commentid,\n 'offset' : offset,\n 'limit' : limit,\n 'maxdepth' : maxdepth\n })\n else:\n raise DeviantartError(\"No deviationid defined.\")\n\n elif endpoint == \"profile\":\n if username:\n response = self._req('/comments/{}/{}'.format(endpoint, username), {\n \"commentid\" : commentid,\n 'offset' : offset,\n 'limit' : limit,\n 'maxdepth' : maxdepth\n })\n else:\n raise DeviantartError(\"No username defined.\")\n\n elif endpoint == \"status\":\n if statusid:\n response = self._req('/comments/{}/{}'.format(endpoint, statusid), {\n \"commentid\" : commentid,\n 'offset' : offset,\n 'limit' : limit,\n 'maxdepth' : maxdepth\n })\n else:\n raise DeviantartError(\"No statusid defined.\")\n\n elif endpoint == \"siblings\":\n if commentid:\n response = self._req('/comments/{}/{}'.format(commentid, endpoint), {\n \"ext_item\" : ext_item,\n 'offset' : offset,\n 'limit' : limit\n })\n else:\n raise DeviantartError(\"No commentid defined.\")\n else:\n raise DeviantartError(\"Unknown endpoint.\")\n\n comments = []\n\n for item in response['thread']:\n c = Comment()\n c.from_dict(item)\n comments.append(c)\n\n return {\n \"thread\" : comments,\n \"has_less\" : response['has_less'],\n \"has_more\" : response['has_more'],\n \"prev_offset\" : response['prev_offset'],\n \"next_offset\" : response['next_offset']\n }"
] |
[
0.8164628148078918,
0.79473477602005,
0.7838341593742371,
0.7744812369346619,
0.7670895457267761,
0.7670817971229553,
0.7664884924888611,
0.765682578086853,
0.7602823376655579,
0.7534890174865723,
0.7518331408500671,
0.7515272498130798
] |
Returns a boolean if the user in the request has edit permission for the object.
Can also be passed a version object to check if the user has permission to edit a version
of the object (if they own it).
|
def has_edit_permission(self, request, obj=None, version=None):
"""
Returns a boolean if the user in the request has edit permission for the object.
Can also be passed a version object to check if the user has permission to edit a version
of the object (if they own it).
"""
# Has the edit permission for this object type
permission_name = '{}.edit_{}'.format(self.opts.app_label, self.opts.model_name)
has_permission = request.user.has_perm(permission_name)
if obj is not None and has_permission is False:
has_permission = request.user.has_perm(permission_name, obj=obj)
if has_permission and version is not None:
# Version must not be saved, and must belong to this user
if version.version_number or version.owner != request.user:
has_permission = False
return has_permission
|
[
"def get_can_edit(self, obj):\n \"\"\" returns true if user has permission to edit, false otherwise \"\"\"\n view = self.context.get('view')\n request = copy(self.context.get('request'))\n request._method = 'PUT'\n try:\n view.check_object_permissions(request, obj)\n except (PermissionDenied, NotAuthenticated):\n return False\n else:\n return True",
"def has_change_permission(self):\n \"\"\"\n Returns a boolean if the current user has permission to change the current object being\n viewed/edited.\n \"\"\"\n has_permission = False\n\n if self.user is not None:\n # We check for the object level permission here, even though by default the Django\n # admin doesn't. If the Django ModelAdmin is extended to allow object level\n # permissions - then this will work as expected.\n permission_name = '{}.change_{}'.format(self.opts.app_label, self.opts.model_name)\n has_permission = (\n self.user.has_perm(permission_name) or\n self.user.has_perm(permission_name, obj=self.obj)\n )\n\n return has_permission",
"def has_glitter_edit_permission(self, request, obj):\n \"\"\"\n Return a boolean if a user has edit access to the glitter object/page this object is on.\n \"\"\"\n\n # We're testing for the edit permission here with the glitter object - not the current\n # object, not the change permission. Once a user has edit access to an object they can edit\n # all content on it.\n permission_name = '{}.edit_{}'.format(\n obj._meta.app_label, obj._meta.model_name,\n )\n has_permission = (\n request.user.has_perm(permission_name) or\n request.user.has_perm(permission_name, obj=obj)\n )\n return has_permission",
"def hasUserAddEditPermission(self):\n \"\"\"\n Checks if the current user has privileges to access to the editing view.\n From Jira LIMS-1549:\n - Creation/Edit: Lab manager, Client Contact, Lab Clerk, Client Contact (for Client-specific SRTs)\n :returns: True/False\n \"\"\"\n mtool = getToolByName(self, 'portal_membership')\n checkPermission = mtool.checkPermission\n # In bika_samplinground_workflow.csv there are defined the ModifyPortalContent statements. There is said that\n # client has ModifyPortalContent permission enabled, so here we have to check if the client satisfy the\n # condition wrote in the function's description\n if (checkPermission(ModifyPortalContent, self) or checkPermission(AddPortalContent, self)) \\\n and 'Client' in api.user.get_current().getRoles():\n # Checking if the current user is a current client's contact\n userID = api.user.get_current().id\n contact_objs = self.getContacts()\n contact_ids = [obj.getUsername() for obj in contact_objs]\n if userID in contact_ids:\n return True\n else:\n return False\n return checkPermission(ModifyPortalContent, self) or checkPermission(AddPortalContent, self)",
"def can_edit(self, user=None, request=None):\n \"\"\"\n Define if a user can edit or not the instance, according to his account\n or the request.\n \"\"\"\n can = False\n if request and not self.owner:\n if (getattr(settings, \"UMAP_ALLOW_ANONYMOUS\", False)\n and self.is_anonymous_owner(request)):\n can = True\n if self.edit_status == self.ANONYMOUS:\n can = True\n elif not user.is_authenticated:\n pass\n elif user == self.owner:\n can = True\n elif self.edit_status == self.EDITORS and user in self.editors.all():\n can = True\n return can",
"def is_editable(self, request):\n \"\"\"\n Restrict in-line editing to the objects's owner and superusers.\n \"\"\"\n return request.user.is_superuser or request.user.id == self.user_id",
"def has_update_permission(self, request, obj):\n \"\"\"\n Returns True if the requesting user is allowed to update the given object, False otherwise.\n \"\"\"\n perm_string = '%s.change_%s' % (self.model._meta.app_label,\n self.model._meta.object_name.lower()\n )\n return request.user.has_perm(perm_string)",
"def has_object_permission(self, request, view, obj):\n \"\"\"determines if requesting user has permissions for the object\n\n :param request: WSGI request object - where we get the user from\n :param view: the view calling for permission\n :param obj: the object in question\n :return: `bool`\n \"\"\"\n # Give permission if we're not protecting this method\n if self.protected_methods and request.method not in self.protected_methods:\n return True\n\n user = getattr(request, \"user\", None)\n\n if not user or user.is_anonymous():\n return False\n\n if self.require_staff and not user.is_staff:\n return False\n\n # if they have higher-level privileges we can return true right now\n if user.has_perms(self.permissions):\n return True\n\n # no? ok maybe they're the author and have appropriate author permissions.\n authors_field = getattr(obj, self.authors_field, None)\n\n if not authors_field:\n return False\n\n if self.author_permissions and not user.has_perms(self.author_permissions):\n return False\n\n return user in authors_field.all()",
"def can_edit(self, user=None, request=None):\n \"\"\"\n Define if a user can edit or not the instance, according to his account\n or the request.\n \"\"\"\n can = False\n if request and not self.owner:\n if (getattr(settings, \"LEAFLET_STORAGE_ALLOW_ANONYMOUS\", False)\n and self.is_anonymous_owner(request)):\n can = True\n if user and user.is_authenticated():\n # if user is authenticated, attach as owner\n self.owner = user\n self.save()\n msg = _(\"Your anonymous map has been attached to your account %s\" % user)\n messages.info(request, msg)\n if self.edit_status == self.ANONYMOUS:\n can = True\n elif not user.is_authenticated():\n pass\n elif user == self.owner:\n can = True\n elif self.edit_status == self.EDITORS and user in self.editors.all():\n can = True\n return can",
"def has_change_permission(self, request, obj=None):\n \"\"\"\n Returns True if the given request has permission to change the given\n Django model instance, the default implementation doesn't examine the\n `obj` parameter.\n\n Can be overriden by the user in subclasses. In such case it should\n return True if the given request has permission to change the `obj`\n model instance. If `obj` is None, this should return True if the given\n request has permission to change *any* object of the given type.\n \"\"\"\n opts = self.opts\n return request.user.has_perm(opts.app_label + '.' + opts.get_change_permission(), obj)",
"def has_permission(self, request):\n \"\"\"Check if user has permission\"\"\"\n if not self.object and not self.permission:\n return True\n\n if not self.permission:\n return request.user.has_perm('{}_{}'.format(\n self.model_permission,\n self.object.__class__.__name__.lower()), self.object\n )\n\n return request.user.has_perm(self.permission)",
"def is_editable(obj, request):\n \"\"\"\n Returns ``True`` if the object is editable for the request. First\n check for a custom ``editable`` handler on the object, otherwise\n use the logged in user and check change permissions for the\n object's model.\n \"\"\"\n if hasattr(obj, \"is_editable\"):\n return obj.is_editable(request)\n else:\n codename = get_permission_codename(\"change\", obj._meta)\n perm = \"%s.%s\" % (obj._meta.app_label, codename)\n return (request.user.is_authenticated() and\n has_site_permission(request.user) and\n request.user.has_perm(perm))"
] |
[
0.8267927169799805,
0.8250415325164795,
0.8135248422622681,
0.7944478392601013,
0.79191654920578,
0.7849165201187134,
0.7844603061676025,
0.773772120475769,
0.7692142128944397,
0.7653836607933044,
0.7638435959815979,
0.7629140019416809
] |
Returns a boolean if the user in the request has publish permission for the object.
|
def has_publish_permission(self, request, obj=None):
"""
Returns a boolean if the user in the request has publish permission for the object.
"""
permission_name = '{}.publish_{}'.format(self.opts.app_label, self.opts.model_name)
has_permission = request.user.has_perm(permission_name)
if obj is not None and has_permission is False:
has_permission = request.user.has_perm(permission_name, obj=obj)
return has_permission
|
[
"def has_publish_permission(self, request, obj=None):\n \"\"\"\n Determines if the user has permissions to publish.\n\n :param request: Django request object.\n :param obj: The object to determine if the user has\n permissions to publish.\n :return: Boolean.\n \"\"\"\n # If auto-publishing is enabled, no user has \"permission\" to publish\n # because it happens automatically\n if is_automatic_publishing_enabled(self.model):\n return False\n user_obj = request.user\n if not user_obj.is_active:\n return False\n if user_obj.is_superuser:\n return True\n # Normal user with `can_publish` permission can always publish\n if user_obj.has_perm('%s.can_publish' % self.opts.app_label):\n return True\n # Normal user with `can_republish` permission can only publish if the\n # item is already published.\n if user_obj.has_perm('%s.can_republish' % self.opts.app_label) and \\\n obj and getattr(obj, 'has_been_published', False):\n return True\n # User does not meet any publishing permisison requirements; reject!\n return False",
"def has_preview_permission(self, request, obj=None):\n \"\"\"\n Return `True` if the user has permissions to preview a publishable\n item.\n\n NOTE: this method does not actually change who can or cannot preview\n any particular item, just whether to show the preview link. The real\n dcision is made by a combination of:\n\n - `PublishingMiddleware` which chooses who can view draft content\n - the view code for a particular item, which may or may not render\n draft content for a specific user.\n\n :param request: Django request object.\n :param obj: The object the user would preview, if permitted.\n :return: Boolean.\n \"\"\"\n # User who can publish always has preview permission.\n if self.has_publish_permission(request, obj=obj):\n return True\n user_obj = request.user\n if not user_obj.is_active:\n return False\n if user_obj.is_staff:\n return True\n return False",
"def has_object_permission(self, request, view, obj):\n \"\"\"determines if requesting user has permissions for the object\n\n :param request: WSGI request object - where we get the user from\n :param view: the view calling for permission\n :param obj: the object in question\n :return: `bool`\n \"\"\"\n # Give permission if we're not protecting this method\n if self.protected_methods and request.method not in self.protected_methods:\n return True\n\n user = getattr(request, \"user\", None)\n\n if not user or user.is_anonymous():\n return False\n\n if self.require_staff and not user.is_staff:\n return False\n\n # if they have higher-level privileges we can return true right now\n if user.has_perms(self.permissions):\n return True\n\n # no? ok maybe they're the author and have appropriate author permissions.\n authors_field = getattr(obj, self.authors_field, None)\n\n if not authors_field:\n return False\n\n if self.author_permissions and not user.has_perms(self.author_permissions):\n return False\n\n return user in authors_field.all()",
"def has_permission(self, request):\n \"\"\"Check if user has permission\"\"\"\n if not self.object and not self.permission:\n return True\n\n if not self.permission:\n return request.user.has_perm('{}_{}'.format(\n self.model_permission,\n self.object.__class__.__name__.lower()), self.object\n )\n\n return request.user.has_perm(self.permission)",
"def has_update_permission(self, request, obj):\n \"\"\"\n Returns True if the requesting user is allowed to update the given object, False otherwise.\n \"\"\"\n perm_string = '%s.change_%s' % (self.model._meta.app_label,\n self.model._meta.object_name.lower()\n )\n return request.user.has_perm(perm_string)",
"def has_csv_permission(self, request, obj=None):\n \"\"\"\n Returns True if the given request has permission to add an object.\n Can be overridden by the user in subclasses. By default, we assume\n all staff users can use this action unless `DJANGO_EXPORTS_REQUIRE_PERM`\n is set to True in your django settings.\n \"\"\"\n if getattr(settings, 'DJANGO_EXPORTS_REQUIRE_PERM', None):\n opts = self.opts\n codename = '%s_%s' % ('csv', opts.object_name.lower())\n return request.user.has_perm(\"%s.%s\" % (opts.app_label, codename))\n return True",
"public boolean isCanPublish() {\n\n return OpenCms.getWorkplaceManager().getDefaultUserSettings().isAllowBrokenRelations()\n || OpenCms.getRoleManager().hasRole(getCms(), CmsRole.VFS_MANAGER);\n }",
"def has_edit_permission(self, request, obj=None, version=None):\n \"\"\"\n Returns a boolean if the user in the request has edit permission for the object.\n\n Can also be passed a version object to check if the user has permission to edit a version\n of the object (if they own it).\n \"\"\"\n # Has the edit permission for this object type\n permission_name = '{}.edit_{}'.format(self.opts.app_label, self.opts.model_name)\n has_permission = request.user.has_perm(permission_name)\n\n if obj is not None and has_permission is False:\n has_permission = request.user.has_perm(permission_name, obj=obj)\n\n if has_permission and version is not None:\n # Version must not be saved, and must belong to this user\n if version.version_number or version.owner != request.user:\n has_permission = False\n\n return has_permission",
"def has_add_permission(self, request):\n \"\"\"\n Returns True if the requesting user is allowed to add an object, False otherwise.\n \"\"\"\n perm_string = '%s.add_%s' % (self.model._meta.app_label,\n self.model._meta.object_name.lower()\n )\n return request.user.has_perm(perm_string)",
"def has_object_permission(self, request, view, obj):\n \"\"\"Check object permissions.\"\"\"\n # admins can do anything\n if request.user.is_superuser:\n return True\n\n # `share` permission is required for editing permissions\n if 'permissions' in view.action:\n self.perms_map['POST'] = ['%(app_label)s.share_%(model_name)s']\n\n if view.action in ['add_data', 'remove_data']:\n self.perms_map['POST'] = ['%(app_label)s.add_%(model_name)s']\n\n if hasattr(view, 'get_queryset'):\n queryset = view.get_queryset()\n else:\n queryset = getattr(view, 'queryset', None)\n\n assert queryset is not None, (\n 'Cannot apply DjangoObjectPermissions on a view that '\n 'does not set `.queryset` or have a `.get_queryset()` method.'\n )\n\n model_cls = queryset.model\n user = request.user\n\n perms = self.get_required_object_permissions(request.method, model_cls)\n\n if not user.has_perms(perms, obj) and not AnonymousUser().has_perms(perms, obj):\n # If the user does not have permissions we need to determine if\n # they have read permissions to see 403, or not, and simply see\n # a 404 response.\n\n if request.method in permissions.SAFE_METHODS:\n # Read permissions already checked and failed, no need\n # to make another lookup.\n raise Http404\n\n read_perms = self.get_required_object_permissions('GET', model_cls)\n if not user.has_perms(read_perms, obj):\n raise Http404\n\n # Has read permissions.\n return False\n\n return True",
"def has_object_permission(self, request, view, obj):\n \"\"\"\n Allow staff or superusers, and the owner of the object itself.\n \"\"\"\n user = request.user\n if not user.is_authenticated:\n return False\n elif user.is_staff or user.is_superuser:\n return True\n return user == obj.user",
"def has_export_permission(self, request):\n \"\"\"\n Returns whether a request has export permission.\n \"\"\"\n EXPORT_PERMISSION_CODE = getattr(settings, 'IMPORT_EXPORT_EXPORT_PERMISSION_CODE', None)\n if EXPORT_PERMISSION_CODE is None:\n return True\n\n opts = self.opts\n codename = get_permission_codename(EXPORT_PERMISSION_CODE, opts)\n return request.user.has_perm(\"%s.%s\" % (opts.app_label, codename))"
] |
[
0.898836612701416,
0.8384897112846375,
0.7753359079360962,
0.7683749198913574,
0.7435063123703003,
0.741520881652832,
0.7396457195281982,
0.7391578555107117,
0.7368825674057007,
0.7341288924217224,
0.7313709855079651,
0.7284159064292908
] |
Get a valid semantic version for tag
|
def semantic_version(tag):
"""Get a valid semantic version for tag
"""
try:
version = list(map(int, tag.split('.')))
assert len(version) == 3
return tuple(version)
except Exception as exc:
raise CommandError(
'Could not parse "%s", please use '
'MAJOR.MINOR.PATCH' % tag
) from exc
|
[
"def validate_version(self, prefix='v'):\n \"\"\"Validate version by checking if it is a valid semantic version\n and its value is higher than latest github tag\n \"\"\"\n version = self.software_version()\n repo = self.github_repo()\n repo.releases.validate_tag(version, prefix)\n return version",
"function normVersion(tag) {\n var parts = new semver.SemVer(tag);\n var prerelease = \"\";\n\n if (parts.prerelease && parts.prerelease.length > 0) {\n prerelease = hashPrerelease(parts.prerelease);\n }\n\n return [\n parts.major,\n parts.minor,\n parts.patch\n ].join('.') + (prerelease? '.'+prerelease : '');\n}",
"protected function getVersion()\n {\n // Get version from tag\n $cmd = new Command('git');\n $cmd = $cmd->arg('tag')\n ->execute();\n $output = explode(PHP_EOL, trim($cmd->getOutput()));\n $currentVersion = '0.0.0';\n foreach ($output as $tag) {\n if (preg_match(SemanticVersion::REGEX, $tag)) {\n if (version_compare($currentVersion, $tag, '<')) {\n $currentVersion = $tag;\n }\n }\n }\n return new SemanticVersion($currentVersion);\n }",
"def git_tag_to_semver(git_tag: str) -> SemVer:\n \"\"\"\n :git_tag: A string representation of a Git tag.\n\n Searches a Git tag's string representation for a SemVer, and returns that\n as a SemVer object.\n \"\"\"\n pattern = re.compile(r'[0-9]+\\.[0-9]+\\.[0-9]+$')\n match = pattern.search(git_tag)\n if match:\n version = match.group(0)\n else:\n raise InvalidTagFormatException('Tag passed contains no SemVer.')\n\n return SemVer.from_str(version)",
"def prefix_tag(tag)\n tag = Origen::VersionString.new(tag)\n if tag.semantic?\n tag.prefixed\n else\n tag\n end\n end",
"def get_tag_version(nrml_node):\n \"\"\"\n Extract from a node of kind NRML the tag and the version. For instance\n from '{http://openquake.org/xmlns/nrml/0.4}fragilityModel' one gets\n the pair ('fragilityModel', 'nrml/0.4').\n \"\"\"\n version, tag = re.search(r'(nrml/[\\d\\.]+)\\}(\\w+)', nrml_node.tag).groups()\n return tag, version",
"function toSemver(tag) {\n var parts = tag.split('.');\n var version = parts.slice(0, 3).join('.');\n var prerelease = Number(parts[3]);\n\n // semver == windows version\n if (!prerelease) return version;\n\n var channelId = Math.floor(prerelease/CHANNEL_MAGINITUDE);\n var channel = CHANNELS[channelId - 1];\n var count = prerelease - (channelId*CHANNEL_MAGINITUDE);\n\n return version + '-' + channel + '.' + count\n}",
"def tagversion(repo, level='patch', special=''):\n \"\"\"Increment and return tagged version in git.\n Increment levels are patch, minor and major.\n\n Using semver.org versioning: {major}.{minor}.{patch}{special}\n Special must start with a-z and consist of _a-zA-Z0-9.\n \"\"\"\n prepend = 'v'\n with lcd(repo):\n oldversion = local(\n 'git describe --abbrev=0 --tags', capture=True).strip()\n if oldversion.startswith('v'):\n oldversion = oldversion[1:]\n else:\n prepend = ''\n major, minor, patch = [int(x) for x in re.split('\\D', oldversion, 3)[:3]]\n if special:\n if not re.match('^[a-z]', special):\n raise ValueError('Special must start with a-z')\n if not re.match('[_a-zA-Z0-9]+', special):\n raise ValueError('Must contain start with lowercase letter')\n if level == 'major':\n major, minor, patch = major + 1, 0, 0\n elif level == 'minor':\n major, minor, patch = major, minor + 1, 0\n elif level == 'patch':\n major, minor, patch = major, minor, patch + 1\n version_string = '{}.{}.{}'.format(major, minor, patch) + special\n with lcd(repo):\n local('git tag -s --force {}{}'.format(prepend, version_string))\n return version_string",
"def get_version(self):\n \"\"\"\n Return the stringified version passed to the templatetag.\n \"\"\"\n if not self.node.version:\n return None\n try:\n version = smart_str('%s' % self.node.version.resolve(self.context))\n except template.VariableDoesNotExist:\n raise template.TemplateSyntaxError('\"%s\" tag got an unknown variable: %r' %\n (self.node.nodename, self.node.version.var))\n\n return '%s' % version",
"def validate_version(version)\n return if SemanticPuppet::Version.valid?(version)\n\n err = _(\"version string cannot be parsed as a valid Semantic Version\")\n raise ArgumentError, _(\"Invalid 'version' field in metadata.json: %{err}\") % { err: err }\n end",
"def validate_tag(self, tag_name, prefix=None):\n \"\"\"Validate ``tag_name`` with the latest tag from github\n\n If ``tag_name`` is a valid candidate, return the latest tag from github\n \"\"\"\n new_version = semantic_version(tag_name)\n current = self.latest()\n if current:\n tag_name = current['tag_name']\n if prefix:\n tag_name = tag_name[len(prefix):]\n tag_name = semantic_version(tag_name)\n if tag_name >= new_version:\n what = 'equal to' if tag_name == new_version else 'older than'\n raise GithubException(\n 'Your local version \"%s\" is %s '\n 'the current github version \"%s\".\\n'\n 'Bump the local version to '\n 'continue.' %\n (\n str(new_version),\n what,\n str(tag_name)\n )\n )\n return current",
"function validSemverTag(list, tag) {\n if (semver.valid(tag)) {\n list.push(tag);\n }\n\n return list;\n}"
] |
[
0.7959873676300049,
0.7770457863807678,
0.7640970945358276,
0.7486236095428467,
0.741369903087616,
0.7395448684692383,
0.7371225357055664,
0.7266147136688232,
0.7261949777603149,
0.7255418300628662,
0.7233297824859619,
0.7209794521331787
] |
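A minimal, illustrative sketch of how the semantic_version() helper above might be exercised. CommandError is assumed to come from the surrounding CLI framework (it is not defined in the record), so a local stand-in exception is declared here purely to keep the example self-contained.

# Stand-in for the framework's CommandError; assumption, not part of the original code.
class CommandError(Exception):
    pass

def semantic_version(tag):
    """Get a valid semantic version for tag (copied from the record above for self-containment)."""
    try:
        version = list(map(int, tag.split('.')))
        assert len(version) == 3
        return tuple(version)
    except Exception as exc:
        raise CommandError(
            'Could not parse "%s", please use MAJOR.MINOR.PATCH' % tag
        ) from exc

# Valid tags parse into integer tuples, which compare correctly as versions.
assert semantic_version('1.2.3') == (1, 2, 3)
assert semantic_version('1.2.3') < (1, 10, 0)

# Anything that is not MAJOR.MINOR.PATCH is rejected.
try:
    semantic_version('v1.2')
except CommandError as err:
    print(err)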
Function load
Store the object data
|
def load(self, data):
""" Function load
Store the object data
"""
self.clear()
self.update(data)
self.enhance()
|
[
"function loadObject(data, db, callback) {\n callback = callback || function() {};\n var iterator = function(modelName, next){\n insertCollection(modelName, data[modelName], db, next);\n };\n async.forEachSeries(Object.keys(data), iterator, callback);\n}",
"function(data, depth) {\n depth = depth || 0;\n\n // If root, store the data\n if (!depth) {\n this.data = data;\n }\n\n // Store the data indexed by URL\n if (data.url) {\n this.dataMap[data.url] = data;\n }\n\n if (data.children) {\n for (var i = 0, l = data.children.length; i < l; i++) {\n this.load(data.children[i], depth + 1);\n }\n }\n }",
"final protected function _loadData()\n {\n if (!$this->isLoaded()) {\n $this->_object_set = new ArrayObject();\n foreach ($this->_getData() as $object) {\n $this->insert($object);\n }\n }\n }",
"function load() {\n var key, data, value, pos = 0;\n\n items = {};\n\n // localStorage can be disabled on WebKit/Gecko so make a dummy storage\n if (!hasOldIEDataSupport) {\n return;\n }\n\n function next(end) {\n var value, nextPos;\n\n nextPos = end !== undefined ? pos + end : data.indexOf(',', pos);\n if (nextPos === -1 || nextPos > data.length) {\n return null;\n }\n\n value = data.substring(pos, nextPos);\n pos = nextPos + 1;\n\n return value;\n }\n\n storageElm.load(userDataKey);\n data = storageElm.getAttribute(userDataKey) || '';\n\n do {\n var offset = next();\n if (offset === null) {\n break;\n }\n\n key = next(parseInt(offset, 32) || 0);\n if (key !== null) {\n offset = next();\n if (offset === null) {\n break;\n }\n\n value = next(parseInt(offset, 32) || 0);\n\n if (key) {\n items[key] = value;\n }\n }\n } while (key !== null);\n\n updateKeys();\n }",
"function (data, oncomplete, onprogress, onerror) {\n \n var i = 0, l = data.length, current, obj, total = l, j = 0, ext = \"\" ;\n \n //Progress function\n function pro(){\n var src = this.src;\n \n //Remove events cause audio trigger this event more than once(depends on browser)\n if (this.removeEventListener) { \n this.removeEventListener('canplaythrough', pro, false); \n }\n \n ++j;\n //if progress callback, give information of assets loaded, total and percent\n if (onprogress) \n onprogress({\n loaded: j, \n total: total, \n percent: (j / total * 100),\n src:src\n });\n\t\t\t\t\n if(j === total && oncomplete) oncomplete();\n };\n //Error function\n function err(){\n var src = this.src;\n if (onerror) \n onerror({\n loaded: j, \n total: total, \n percent: (j / total * 100),\n src:src\n });\n \t\t\n j++;\n if(j === total && oncomplete) oncomplete();\n };\n \n for (; i < l; ++i) { \n current = data[i];\n ext = current.substr(current.lastIndexOf('.') + 1, 3).toLowerCase();\n \n obj = Crafty.asset(current) || null; \n \n if (Crafty.support.audio && Crafty.audio.supported[ext]) { \n //Create new object if not exists\n if(!obj){\n var name = current.substr(current.lastIndexOf('/') + 1).toLowerCase();\n obj = Crafty.audio.audioElement();\n obj.id = name;\n obj.src = current;\n obj.preload = \"auto\";\n obj.volume = Crafty.audio.volume;\n Crafty.asset(current, obj);\n Crafty.audio.sounds[name] = {\n obj:obj,\n played:0\n } \n }\n \n //addEventListener is supported on IE9 , Audio as well\n if (obj.addEventListener) { \n obj.addEventListener('canplaythrough', pro, false); \n }\n \n \n } else if (Crafty.image_whitelist.indexOf(ext) >= 0) { \n if(!obj) {\n obj = new Image();\n Crafty.asset(current, obj); \n }\n obj.onload=pro;\n obj.src = \"\"; // workaround for webkit bug\n obj.src = current; //setup src after onload function Opera/IE Bug\n \n } else {\n total--;\n continue; //skip if not applicable\n }\n obj.onerror = err;\n }\n \n \n }",
"function ( url, onLoad, onProgress, onError, onMeshAlter, useAsync ) {\n\t\tvar resource = new THREE.LoaderSupport.ResourceDescriptor( url, 'OBJ' );\n\t\tthis._loadObj( resource, onLoad, onProgress, onError, onMeshAlter, useAsync );\n\t}",
"function(data) {\n // Data indexing\n this._rtree = rbush(9);\n data = data || [];\n var array = [];\n var that = this;\n function index(d) {\n var bbox = that._getBoundingBox(d);\n if (bbox) {\n var key = that._toIndexKey(bbox);\n key.data = d;\n array.push(key);\n }\n }\n if (typeof data === 'function') {\n data = data();\n }\n if (typeof data.forEach === 'function') {\n data.forEach(index);\n } else if (data.length) {\n for (var i = 0; i < data.length; i++) {\n index(data[i]);\n }\n }\n this._rtree.load(array);\n }",
"function postload(e){\n\tif(preinit){\n\t\tpreinit = false;\n\t\tfor(var i=0;i<loadfun.length;i++){ loadfun[i]();}\n\t\tloadfun = [];\n\t}\n}",
"function(err) {\n if (!err) {\n loadData(app, apimanager, models, true, uid);\n }\n if (!apimanager.host) {\n //monitor the file changes, load data again if any changes\n fs.watch(definitionsDir, function(event, filename) {\n if (filename !== '.datastore') {\n logger.debug('File changed in %s%s, reload data', definitionsDir, filename);\n loadData(app, apimanager, models, false, uid);\n }\n });\n }\n }",
"function load() {\n if (process.env.BABEL_DISABLE_CACHE) return;\n\n process.on(\"exit\", save);\n process.nextTick(save);\n\n if (!_pathExists2[\"default\"].sync(FILENAME)) return;\n\n try {\n data = JSON.parse(_fs2[\"default\"].readFileSync(FILENAME));\n } catch (err) {\n return;\n }\n}",
"function ( obj ) {\n\t\tvar l = abaaso.loading;\n\n\t\tif ( l.url === null || obj === undefined ) {\n\t\t\tthrow new Error( label.error.invalidArguments );\n\t\t}\n\n\t\t// Setting loading image\n\t\tif ( l.image === undefined ) {\n\t\t\tl.image = new Image();\n\t\t\tl.image.src = l.url;\n\t\t}\n\n\t\t// Clearing target element\n\t\telement.clear( obj );\n\n\t\t// Creating loading image in target element\n\t\telement.create( \"img\", {alt: label.common.loading, src: l.image.src}, element.create( \"div\", {\"class\": \"loading\"}, obj ) );\n\n\t\treturn obj;\n\t}",
"function load() {\n Desktop.fetch('localStorage', 'getAll').then((storage) => {\n Meteor._localStorage.storage = storage;\n }).catch(() => {\n retries += 1;\n if (retries < 5) {\n load();\n } else {\n console.error('failed to load localStorage contents');\n }\n });\n}"
] |
[
0.7398872375488281,
0.7252195477485657,
0.7247742414474487,
0.7204188108444214,
0.7134860754013062,
0.712976336479187,
0.7112202048301697,
0.7068856358528137,
0.704858124256134,
0.703584611415863,
0.7005787491798401,
0.6986722946166992
] |
Function enhance
Enhance the object with new item or enhanced items
|
def enhance(self):
""" Function enhance
Enhance the object with new item or enhanced items
"""
if self.objName in ['hosts', 'hostgroups',
'puppet_classes']:
from foreman.itemSmartClassParameter\
import ItemSmartClassParameter
self.update({'smart_class_parameters':
SubDict(self.api, self.objName,
self.payloadObj, self.key,
ItemSmartClassParameter)})
|
[
"def enhance(self):\n \"\"\" Function enhance\n Enhance the object with new item or enhanced items\n \"\"\"\n self.update({'os_default_templates':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n SubItemOsDefaultTemplate)})\n self.update({'config_templates':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n SubItemConfigTemplate)})\n self.update({'ptables':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n SubItemPTable)})\n self.update({'media':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n SubItemMedia)})\n self.update({'architectures':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n SubItemArchitecture)})",
"def enhance(self):\n \"\"\" Function enhance\n Enhance the object with new item or enhanced items\n \"\"\"\n self.update({'parameters':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n SubItemParameter)})\n self.update({'interfaces':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n SubItemInterface)})\n self.update({'subnets':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n SubItemSubnet)})",
"def enhance(self):\n \"\"\" Function enhance\n Enhance the object with new item or enhanced items\n \"\"\"\n self.update({'images':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n SubItemImages)})",
"def enhance(self):\n \"\"\" Function enhance\n Enhance the object with new item or enhanced items\n \"\"\"\n self.update({'os_default_templates':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n SubItemOsDefaultTemplate)})\n self.update({'operatingsystems':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n SubItemOperatingSystem)})",
"def enhance(self):\n \"\"\" Function enhance\n Enhance the object with new item or enhanced items\n \"\"\"\n self.update({'puppetclasses':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n SubItemPuppetClasses)})\n self.update({'parameters':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n SubItemParameter)})\n self.update({'interfaces':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n SubItemInterface)})\n self.update({'smart_class_parameters':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n SubItemSmartClassParameter)})",
"def enhance(self):\n \"\"\" Function enhance\n Enhance the object with new item or enhanced items\n \"\"\"\n self.update({'puppetclasses':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n SubItemPuppetClasses)})\n self.update({'parameters':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n SubItemParameter)})\n self.update({'smart_class_parameters':\n SubDict(self.api, self.objName,\n self.payloadObj, self.key,\n ItemSmartClassParameter)})",
"public T enhance(T t) {\n if (!needsEnhancement(t)) {\n return t;\n }\n\n try {\n return getEnhancedClass().getConstructor(baseClass).newInstance(t);\n } catch (Exception e) {\n throw new RuntimeException(String.format(\"Could not enhance object %s (%s)\", t, t.getClass()), e);\n }\n }",
"private static void doEnhancement(CtClass cc, Version modelVersion) throws CannotCompileException,\n NotFoundException, ClassNotFoundException {\n CtClass inter = cp.get(OpenEngSBModel.class.getName());\n cc.addInterface(inter);\n addFields(cc);\n addGetOpenEngSBModelTail(cc);\n addSetOpenEngSBModelTail(cc);\n addRetrieveModelName(cc);\n addRetrieveModelVersion(cc, modelVersion);\n addOpenEngSBModelEntryMethod(cc);\n addRemoveOpenEngSBModelEntryMethod(cc);\n addRetrieveInternalModelId(cc);\n addRetrieveInternalModelTimestamp(cc);\n addRetrieveInternalModelVersion(cc);\n addToOpenEngSBModelValues(cc);\n addToOpenEngSBModelEntries(cc);\n cc.setModifiers(cc.getModifiers() & ~Modifier.ABSTRACT);\n }",
"def enhance(self, inverse=False, gamma=1.0, stretch=\"no\",\n stretch_parameters=None, **kwargs):\n \"\"\"Image enhancement function. It applies **in this order** inversion,\n gamma correction, and stretching to the current image, with parameters\n *inverse* (see :meth:`Image.invert`), *gamma* (see\n :meth:`Image.gamma`), and *stretch* (see :meth:`Image.stretch`).\n \"\"\"\n self.invert(inverse)\n if stretch_parameters is None:\n stretch_parameters = {}\n\n stretch_parameters.update(kwargs)\n self.stretch(stretch, **stretch_parameters)\n self.gamma(gamma)",
"def load(self, data):\n \"\"\" Function load\n Store the object data\n \"\"\"\n self.clear()\n self.update(data)\n self.enhance()",
"def enhance2dataset(dset):\n \"\"\"Apply enhancements to dataset *dset* and return the resulting data\n array of the image.\"\"\"\n attrs = dset.attrs\n img = get_enhanced_image(dset)\n # Clip image data to interval [0.0, 1.0]\n data = img.data.clip(0.0, 1.0)\n data.attrs = attrs\n\n return data",
"function enhancedEcommerceProductAction(track, action, data) {\n enhancedEcommerceTrackProduct(track);\n window.ga('ec:setAction', action, data || {});\n}"
] |
[
0.8752948045730591,
0.8729672431945801,
0.8696154356002808,
0.8695272207260132,
0.8679497838020325,
0.8630505800247192,
0.7447202205657959,
0.7079142928123474,
0.707258939743042,
0.6839107275009155,
0.6810131072998047,
0.6799886226654053
] |
Function reload
Sync the full object
|
def reload(self):
""" Function reload
Sync the full object
"""
self.load(self.api.get(self.objName, self.key))
|
[
"def reload(self):\n \"\"\" Function reload\n Reload the full object to ensure sync\n \"\"\"\n realData = self.load()\n self.clear()\n self.update(realData)",
"function reload(args) {\n if (args !== undefined) {\n if (args.l !== undefined) {\n fs.closeSync(1);\n fs.openSync(args.l, 'a+');\n }\n\n if (args.e !== undefined) {\n fs.closeSync(2);\n fs.openSync(args.e, 'a+');\n }\n }\n}",
"function reload() {\n var defer = q.defer();\n\n if (browserSync.active) {\n browserSync.reload();\n defer.resolve();\n } else\n startServer().then(defer.resolve);\n\n return defer.promise;\n}",
"def refresh(self):\n # type: () -> None\n \"\"\"Refresh the object in place.\"\"\"\n src = self._client.reload(self)\n self.__dict__.update(src.__dict__)",
"function sync() {\n syncDom.setAttribute('data-sync-state', 'syncing');\n var opts = {continuous: true, complete: syncError};\n db.replicate.to(remoteCouch, opts);\n db.replicate.from(remoteCouch, opts);\n }",
"function reload() {\n\t\t\tvar params = {}\n\n\t\t\tparams[model.primary] = me[model.primary]\n\t\t\n\t\t\tvar act = new NobleMachine(model.find(params));\n\n\t\t\tact.next(function(newInst) {\n\t\t\t\tif (newInst) {\n\t\t\t\t\tfor (var col in model.columns) {\n\t\t\t\t\t\tme[col] = newInst[col];\n\t\t\t\t\t}\n\n\t\t\t\t\tact.toNext(me);\n\t\t\t\t} else {\n\t\t\t\t\tfor (var key in model.columns) {\n\t\t\t\t\t\tme[key] = undefined\n\t\t\t\t\t}\n\n\t\t\t\t\tact.toNext(null);\n\t\t\t\t}\n\t\t\t});\n\n\t\t\treturn act;\n\t\t}",
"final public function reload() {\n\n\t\t$app = Application::getInstance();\n\t\t$class = get_called_class();\n\t\t\n\t\t// properties to not reset\n\t\t$propertiesToSave = array('keyProperties', 'db', 'loadedFromDb', 'typeList', 'cache', 'errors');\n\t\t\n\t\t// save key from being unset\n\t\t$propertiesToSave = array_merge($propertiesToSave, $this->keyProperties);\n\n\t\t// unset all the other properties\n\t\tforeach ($this as $key => $value) {\n\t\t\tif (!in_array($key, $propertiesToSave)) {\n\t\t\t\tunset($this->$key);\n\t\t\t}\n\t\t}\n\t\t\n\t\t$this->cache = array();\n\t\t$this->errors = array();\n\t\t\n\t\t$this->loadFromDb($this->getSqlKeyValues());\n\t\t\n\t\t// log the reload \n\t\t$app->logEvent('Reloaded ' . $class . ' object with ' . $this->getKeyForEventlog());\n\t\t\n\t}",
"def refresh(self):\n # type: () -> None\n \"\"\"Refresh the object in place.\"\"\"\n from pykechain.client import API_EXTRA_PARAMS\n src = self._client.reload(self, extra_params=API_EXTRA_PARAMS['activity'])\n self.__dict__.update(src.__dict__)",
"function() {\n var self = this\n self._shouldSyncFromOpts = true\n self.on('update', function() {\n if (self._shouldSyncFromOpts) self.trigger('sync')\n self._shouldSyncFromOpts = true\n })\n }",
"public function reload()\n {\n if ($this['object'] === 'event') {\n parent::g_reload(self::getUrl($this['id']));\n } else {\n parent::g_reload(self::getUrl());\n }\n }",
"function sendReload() {\n if (saveChecksum) {\n $.ajax({\n url: jsbin.getURL() + '/reload',\n data: {\n code: jsbin.state.code,\n revision: jsbin.state.revision,\n checksum: saveChecksum\n },\n type: 'post'\n });\n }\n}",
"async function reload() {\n const requestOptions = {\n hostname: 'localhost',\n port: 8081,\n path: '/reloadapp',\n method: 'HEAD',\n };\n\n const req = http.request(requestOptions, () => {\n clear();\n logger.done('Sent reload request');\n req.end();\n });\n\n req.on('error', e => {\n clear();\n const error = e.toString();\n if (error.includes('connect ECONNREFUSED')) {\n logger.error(`Reload request failed. Make sure Haul is up.`);\n } else {\n logger.error(e);\n }\n });\n\n req.end();\n}"
] |
[
0.8672438859939575,
0.7427269220352173,
0.7301700711250305,
0.7258812189102173,
0.725823163986206,
0.7242302298545837,
0.7187184691429138,
0.7158632874488831,
0.7093992233276367,
0.7003313302993774,
0.7002490758895874,
0.6987038254737854
] |
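The load(), enhance(), and reload() records above all follow the same pattern: the item is a dict subclass, load() replaces its contents and then enhances them, and reload() re-fetches the payload through the API. The sketch below shows that flow under stated assumptions; FakeApi and the 'enhanced' marker are stand-ins invented for illustration, not the original SubDict machinery.

# Hypothetical API stub standing in for the real client; assumption for illustration only.
class FakeApi:
    def __init__(self, store):
        self.store = store

    def get(self, obj_name, key):
        # Return a fresh copy of the stored payload, as a real API call would.
        return dict(self.store[obj_name][key])

class Item(dict):
    def __init__(self, api, obj_name, key):
        super().__init__()
        self.api = api
        self.objName = obj_name
        self.key = key

    def load(self, data):
        # Store the object data: wipe current contents, take the new payload, enhance it.
        self.clear()
        self.update(data)
        self.enhance()

    def enhance(self):
        # Real implementations attach SubDict wrappers here; a marker entry suffices for the sketch.
        self['enhanced'] = True

    def reload(self):
        # Sync the full object by re-fetching it from the API.
        self.load(self.api.get(self.objName, self.key))

api = FakeApi({'hosts': {'web01': {'name': 'web01', 'ip': '10.0.0.5'}}})
host = Item(api, 'hosts', 'web01')
host.reload()
print(host)  # {'name': 'web01', 'ip': '10.0.0.5', 'enhanced': True}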