| repo_name (string, 5–92 chars) | path (string, 4–232 chars) | copies (string, 19 classes) | size (string, 4–7 chars) | content (string, 721–1.04M chars) | license (string, 15 classes) | hash (int64, ≈ ±9.22e18) | line_mean (float64, 6.51–99.8) | line_max (int64, 15–997) | alpha_frac (float64, 0.25–0.96) | autogenerated (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|
| madoodia/codeLab | python/modules_platform.py | 1 | 1829 |
# ===============================================
# MODULE STUDY: platform
import platform
################################ Cross Platform ################################
platform.architecture() # Returns a tuple (bits, linkage)
platform.machine() # Returns the machine type, e.g. 'i386'
platform.node() # Returns the computer’s network name
platform.platform() # Returns a single string identifying the underlying platform with as much useful information as possible.
platform.processor() # Returns the (real) processor name, e.g. 'amdk6'.
platform.python_build() # Returns a tuple (buildno, builddate) stating the Python build number and date as strings.
platform.python_compiler() # Returns a string identifying the compiler used for compiling Python.
platform.python_branch() # Returns a string identifying the Python implementation SCM branch.
platform.python_implementation() # Returns a string identifying the Python implementation
platform.python_revision() # Returns a string identifying the Python implementation SCM revision.
platform.python_version() # Returns the Python version as string 'major.minor.patchlevel'
platform.python_version_tuple() # Returns the Python version as tuple (major, minor, patchlevel) of strings.
platform.release() # Returns the system’s release, e.g. '2.2.0' or 'NT'
platform.system() # Returns the system/OS name, e.g. 'Linux', 'Windows', or 'Java'
platform.version() # Returns the system’s release version
platform.uname() # Fairly portable uname interface.
# Returns a tuple of strings (system, node, release, version, machine, processor) identifying the underlying platform.
platform.win32_ver() # Availability: windows
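# --- Usage sketch (illustrative addition, not part of the original study) ---
# Combines the calls above into a one-line report. platform.uname() unpacks
# as a plain tuple on both Python 2 and 3.
if __name__ == '__main__':
    system, node, release, version, machine, processor = platform.uname()
    print('%s %s on %s, Python %s (%s)' % (system, release, machine,
                                           platform.python_version(),
                                           platform.python_implementation()))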
| mit | 965,381,198,625,149,200 | 39.511111 | 146 | 0.664838 | false |
| annapowellsmith/openpresc | openprescribing/pipeline/tests/test_pipeline.py | 1 | 16563 |
import mock
import os
import json
from django.conf import settings
from django.test import TestCase, override_settings
from pipeline.models import TaskLog
from pipeline.runner import load_tasks, run_task, in_progress
class PipelineTests(TestCase):
def setUp(self):
# Load tasks
self.tasks = load_tasks()
# Set up dummy files on filesystem
for source_id, year_and_month, filename in [
['source_a', '2017_01', 'source_a.csv'],
['source_a', '2017_02', 'source_a.csv'],
['source_a', '2017_03', 'source_a.csv'],
['source_b', '2017_01', 'source_b_1701.csv'],
['source_b', '2017_02', 'source_b_1702.csv'],
['source_b', '2017_03', 'source_b_1703.csv'],
['source_c', '2017_01', 'source_c1.csv'],
['source_c', '2017_01', 'source_c2.csv'],
['source_c', '2017_02', 'source_c1.csv'],
['source_c', '2017_02', 'source_c2.csv'],
]:
path = build_path(source_id, year_and_month, filename)
dir_path = os.path.dirname(path)
try:
os.makedirs(dir_path)
except OSError as e:
import errno
if e.errno != errno.EEXIST or not os.path.isdir(dir_path):
raise
with open(path, 'w') as f:
f.write('1,2,3\n')
# Set up dummy log data
log_data = {
'source_a': [
{
'imported_file': build_path(
'source_a',
'2017_01',
'source_a.csv'
),
'imported_at': '2017-01-01T12:00:00'
},
{
'imported_file': build_path(
'source_a',
'2017_02',
'source_a.csv'
),
'imported_at': '2017-02-01T12:00:00'
}
],
'source_b': [
{
'imported_file': build_path(
'source_b',
'2017_01',
'source_b_1701.csv'
),
'imported_at': '2017-01-01T12:00:00'
},
{
'imported_file': build_path(
'source_b',
'2017_02',
'source_b_1702.csv'
),
'imported_at': '2017-02-01T12:00:00'
}
],
'source_c': [
{
'imported_file': build_path(
'source_c',
'2017_01',
'source_c2.csv'
),
'imported_at': '2017-01-01T12:00:00'
},
{
'imported_file': build_path(
'source_c',
'2017_02',
'source_c2.csv'
),
'imported_at': '2017-02-01T12:00:00'
}
]
}
with open(settings.PIPELINE_IMPORT_LOG_PATH, 'w') as f:
json.dump(log_data, f)
def test_task_initialisation(self):
task = self.tasks['fetch_source_a']
self.assertEqual(task.name, 'fetch_source_a')
self.assertEqual(task.task_type, 'manual_fetch')
self.assertEqual(task.source_id, 'source_a')
self.assertEqual(task.dependencies, [])
task = self.tasks['convert_source_a']
self.assertEqual(task.dependency_names, ['fetch_source_a'])
def test_load_real_tasks(self):
# We're just checking that no exceptions get raised here
path = os.path.join(settings.APPS_ROOT, 'pipeline', 'metadata')
with override_settings(PIPELINE_METADATA_DIR=path):
load_tasks()
def test_run_real_tasks(self):
# We're not actually going to run the management commands, but we're
# going to check that the management commands exist and can be run with
# the given input
path = os.path.join(settings.APPS_ROOT, 'pipeline', 'metadata')
with override_settings(PIPELINE_METADATA_DIR=path):
tasks = load_tasks()
with mock.patch('django.core.management.base.BaseCommand.execute'):
for task in tasks.by_type('auto_fetch'):
task.run(2017, 7)
with mock.patch('pipeline.runner.Task.unimported_paths',
return_value=['/some/path']):
for task in tasks.by_type('convert'):
task.run(2017, 7)
with mock.patch('pipeline.runner.Task.unimported_paths',
return_value=['/some/path']):
for task in tasks.by_type('import'):
task.run(2017, 7)
for task in tasks.by_type('post_process'):
task.run(2017, 7, last_imported='2017_01')
def test_tasks_by_type(self):
tasks = self.tasks.by_type('manual_fetch')
self.assertIn('fetch_source_a', [task.name for task in tasks])
tasks = self.tasks.by_type('auto_fetch')
self.assertIn('fetch_source_b', [task.name for task in tasks])
def test_tasks_ordered(self):
task_names = [task.name for task in self.tasks.ordered()]
for name1, name2 in [
['fetch_source_a', 'convert_source_a'],
['convert_source_a', 'import_source_a'],
['fetch_source_b', 'import_source_b'],
['import_source_a', 'import_source_b'],
['fetch_source_c', 'import_source_c1'],
['import_source_a', 'import_source_c1'],
['import_source_b', 'import_source_c1'],
['fetch_source_c', 'import_source_c2'],
['import_source_c1', 'import_source_c2'],
['import_source_a', 'post_process'],
['import_source_b', 'post_process'],
['import_source_c1', 'post_process'],
]:
self.assertTrue(task_names.index(name1) < task_names.index(name2))
def test_tasks_by_type_ordered(self):
tasks = self.tasks.by_type('import').ordered()
task_names = [task.name for task in tasks]
expected_output = [
'import_source_a',
'import_source_b',
'import_source_c1',
'import_source_c2',
]
self.assertEqual(task_names, expected_output)
def test_tasks_ordered_by_type(self):
tasks = self.tasks.ordered().by_type('import')
task_names = [task.name for task in tasks]
expected_output = [
'import_source_a',
'import_source_b',
'import_source_c1',
'import_source_c2',
]
self.assertEqual(task_names, expected_output)
def test_source_initialisation(self):
source = self.tasks['import_source_a'].source
self.assertEqual(source.name, 'source_a')
self.assertEqual(source.title, 'Source A')
def test_tasks_that_use_raw_source_data(self):
source_a = self.tasks['fetch_source_a'].source
self.assertEqual(
[task.name for task in source_a.tasks_that_use_raw_source_data()],
['convert_source_a']
)
source_c = self.tasks['fetch_source_c'].source
self.assertEqual(
[task.name for task in source_c.tasks_that_use_raw_source_data()],
['import_source_c1', 'import_source_c2']
)
def test_filename_pattern(self):
task = self.tasks['convert_source_a']
self.assertEqual(task.filename_pattern(), 'source_a.csv')
def test_imported_paths(self):
task = self.tasks['convert_source_a']
expected_output = [
build_path('source_a', '2017_01', 'source_a.csv'),
build_path('source_a', '2017_02', 'source_a.csv'),
]
self.assertEqual(task.imported_paths(), expected_output)
task = self.tasks['import_source_b']
expected_output = [
build_path('source_b', '2017_01', 'source_b_1701.csv'),
build_path('source_b', '2017_02', 'source_b_1702.csv'),
]
self.assertEqual(task.imported_paths(), expected_output)
task = self.tasks['import_source_c1']
self.assertEqual(task.imported_paths(), [])
def test_set_last_imported_path(self):
task = self.tasks['import_source_b']
path = build_path('source_b', '2017_03', 'source_b_1703.csv')
task.set_last_imported_path(path)
expected_output = [
build_path('source_b', '2017_01', 'source_b_1701.csv'),
build_path('source_b', '2017_02', 'source_b_1702.csv'),
build_path('source_b', '2017_03', 'source_b_1703.csv'),
]
self.assertEqual(task.imported_paths(), expected_output)
# According to the log data in setUp(), no data has been imported for
# source_c yet
task1 = self.tasks['import_source_c1']
path = build_path('source_c', '2017_03', 'source_c1.csv')
task1.set_last_imported_path(path)
expected_output = [
build_path('source_c', '2017_03', 'source_c1.csv'),
]
self.assertEqual(task1.imported_paths(), expected_output)
expected_output = [
build_path('source_b', '2017_01', 'source_b_1701.csv'),
build_path('source_b', '2017_02', 'source_b_1702.csv'),
build_path('source_b', '2017_03', 'source_b_1703.csv'),
]
self.assertEqual(task.imported_paths(), expected_output)
def test_input_paths(self):
task = self.tasks['import_source_b']
expected_output = [
build_path(
'source_b',
'2017_{}'.format(month),
'source_b_17{}.csv'.format(month)
)
for month in ['01', '02', '03']
]
self.assertEqual(task.input_paths(), expected_output)
def test_unimported_paths(self):
task = self.tasks['import_source_b']
expected_output = [
build_path('source_b', '2017_03', 'source_b_1703.csv'),
]
self.assertEqual(task.unimported_paths(), expected_output)
def test_manual_fetch_instructions(self):
task = self.tasks['fetch_source_a']
expected_output = '''
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You should now locate the latest data for source_a, if available
You should save it at:
{data_basedir}/source_a/YYYY_MM
The last imported data can be found at:
{data_basedir}/source_a/2017_02/source_a.csv
'''.strip().format(
data_basedir=settings.PIPELINE_DATA_BASEDIR,
)
output = task.manual_fetch_instructions()
self.assertEqual(output, expected_output)
task = self.tasks['fetch_source_c']
expected_output = '''
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You should now locate the latest data for source_c, if available
You should save it at:
{data_basedir}/source_c/YYYY_MM
The last imported data can be found at:
<never imported>
{data_basedir}/source_c/2017_02/source_c2.csv
'''.strip().format(
data_basedir=settings.PIPELINE_DATA_BASEDIR,
)
output = task.manual_fetch_instructions()
self.assertEqual(output, expected_output)
def test_manual_fetch_instructions_with_real_data(self):
path = os.path.join(settings.APPS_ROOT, 'pipeline', 'metadata')
with override_settings(PIPELINE_METADATA_DIR=path):
tasks = load_tasks()
# We're just checking that no exceptions get raised here
for task in tasks.by_type('manual_fetch'):
task.manual_fetch_instructions()
def test_run_auto_fetch(self):
task = self.tasks['fetch_source_b']
with mock.patch('pipeline.runner.call_command') as cc:
task.run(2017, 7)
cc.assert_called_with('fetch_source_b', '2017', '7', '--yes')
def test_run_convert(self):
task = self.tasks['convert_source_a']
path = build_path('source_a', '2017_03', 'source_a.csv')
with mock.patch('pipeline.runner.call_command') as cc:
task.run(2017, 7)
cc.assert_called_with('convert_source_a', '--filename', path)
def test_run_import(self):
task = self.tasks['import_source_c1']
expected_calls = []
for year_and_month in ['2017_01', '2017_02']:
path = build_path('source_c', year_and_month, 'source_c1.csv')
call = mock.call('import_source_c', '--filename', path)
expected_calls.append(call)
with mock.patch('pipeline.runner.call_command') as cc:
task.run(2017, 7)
cc.assert_has_calls(expected_calls)
def test_run_post_process(self):
task = self.tasks['post_process']
with mock.patch('pipeline.runner.call_command') as cc:
task.run(2017, 7, '2017_01')
cc.assert_called_with('post_process', '2017_01')
def test_run_task(self):
task = self.tasks['fetch_source_b']
with mock.patch('pipeline.runner.call_command'):
run_task(task, 2017, 7)
log = TaskLog.objects.get(
year=2017,
month=7,
task_name='fetch_source_b',
)
self.assertEqual(log.status, 'successful')
self.assertIsNotNone(log.ended_at)
def test_run_task_that_fails(self):
task = self.tasks['fetch_source_b']
with self.assertRaises(KeyboardInterrupt):
with mock.patch('pipeline.runner.call_command') as cc:
cc.side_effect = KeyboardInterrupt
run_task(task, 2017, 7)
log = TaskLog.objects.get(
year=2017,
month=7,
task_name='fetch_source_b',
)
self.assertEqual(log.status, 'failed')
self.assertIsNotNone(log.ended_at)
self.assertIn('KeyboardInterrupt', log.formatted_tb)
def test_run_task_after_success(self):
task = self.tasks['fetch_source_b']
with mock.patch('pipeline.runner.call_command') as cc:
run_task(task, 2017, 7)
with mock.patch('pipeline.runner.call_command') as cc:
run_task(task, 2017, 7)
cc.assert_not_called()
logs = TaskLog.objects.filter(
year=2017, month=7,
task_name='fetch_source_b'
)
self.assertEqual(1, logs.count())
def test_run_task_after_failure(self):
task = self.tasks['fetch_source_b']
with self.assertRaises(KeyboardInterrupt):
with mock.patch('pipeline.runner.call_command') as cc:
cc.side_effect = KeyboardInterrupt
run_task(task, 2017, 7)
with mock.patch('pipeline.runner.call_command') as cc:
run_task(task, 2017, 7)
logs = TaskLog.objects.filter(
year=2017, month=7,
task_name='fetch_source_b'
)
self.assertEqual(2, logs.count())
def test_in_progress_when_not_in_progress(self):
TaskLog.objects.create(year=2017, month=7, task_name='task1')
TaskLog.objects.create(year=2017, month=7, task_name='task2')
TaskLog.objects.create(year=2017, month=7, task_name='fetch_and_import')
TaskLog.objects.create(year=2017, month=8, task_name='task1')
TaskLog.objects.create(year=2017, month=8, task_name='task2')
TaskLog.objects.create(year=2017, month=8, task_name='fetch_and_import')
self.assertFalse(in_progress())
def test_in_progress_when_in_progress(self):
TaskLog.objects.create(year=2017, month=7, task_name='task1')
TaskLog.objects.create(year=2017, month=7, task_name='task2')
TaskLog.objects.create(year=2017, month=7, task_name='fetch_and_import')
TaskLog.objects.create(year=2017, month=8, task_name='task1')
TaskLog.objects.create(year=2017, month=8, task_name='task2')
TaskLog.objects.create(year=2017, month=8, task_name='fetch_and_import')
TaskLog.objects.create(year=2017, month=9, task_name='task1')
self.assertTrue(in_progress())
def build_path(source_id, year_and_month, filename):
return os.path.join(
settings.PIPELINE_DATA_BASEDIR,
source_id,
year_and_month,
filename
)
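# --- Sketch (illustrative addition): on Python 3.2+ the EEXIST handling in
# setUp() collapses into one standard-library call.
def ensure_dir(dir_path):
    # exist_ok=True tolerates an already-existing directory while still
    # raising for genuine errors such as permission problems.
    os.makedirs(dir_path, exist_ok=True)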
| mit | 6,216,994,812,123,171,000 | 37.340278 | 80 | 0.539395 | false |
| atilag/qiskit-sdk-py | qiskit/extensions/standard/cu3.py | 1 | 2732 |
# -*- coding: utf-8 -*-
# Copyright 2017 IBM RESEARCH. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""
controlled-u3 gate.
"""
from qiskit import CompositeGate
from qiskit import Gate
from qiskit import QuantumCircuit
from qiskit._instructionset import InstructionSet
from qiskit._quantumregister import QuantumRegister
from qiskit.extensions.standard import header # pylint: disable=unused-import
class Cu3Gate(Gate):
"""controlled-u3 gate."""
def __init__(self, theta, phi, lam, ctl, tgt, circ=None):
"""Create new cu3 gate."""
super().__init__("cu3", [theta, phi, lam], [ctl, tgt], circ)
def qasm(self):
"""Return OPENQASM string."""
ctl = self.arg[0]
tgt = self.arg[1]
theta = self.param[0]
phi = self.param[1]
lam = self.param[2]
return self._qasmif("cu3(%s,%s,%s) %s[%d],%s[%d];" % (theta, phi, lam,
ctl[0].openqasm_name, ctl[1],
tgt[0].openqasm_name, tgt[1]))
def inverse(self):
"""Invert this gate."""
self.param[0] = -self.param[0]
phi = self.param[1]
self.param[1] = -self.param[2]
self.param[2] = -phi
return self
def reapply(self, circ):
"""Reapply this gate to corresponding qubits in circ."""
self._modifiers(circ.cu3(self.param[0], self.param[1],
self.param[2], self.arg[0], self.arg[1]))
def cu3(self, theta, phi, lam, ctl, tgt):
"""Apply cu3 from ctl to tgt with angle theta, phi, lam."""
if isinstance(ctl, QuantumRegister) and \
isinstance(tgt, QuantumRegister) and len(ctl) == len(tgt):
instructions = InstructionSet()
for i in range(ctl.size):
instructions.add(self.cu3(theta, phi, lam, (ctl, i), (tgt, i)))
return instructions
self._check_qubit(ctl)
self._check_qubit(tgt)
self._check_dups([ctl, tgt])
return self._attach(Cu3Gate(theta, phi, lam, ctl, tgt, self))
QuantumCircuit.cu3 = cu3
CompositeGate.cu3 = cu3
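# --- Sketch (illustrative addition): the 4x4 unitary this gate implements,
# using the OpenQASM u3 definition of this era. The basis ordering (control
# as the most significant qubit) is an assumption, not taken from this file.
import numpy as np

def cu3_matrix(theta, phi, lam):
    """Controlled-u3: identity on the control-|0> block, u3 on |1>."""
    u3 = np.array([
        [np.cos(theta / 2.0), -np.exp(1j * lam) * np.sin(theta / 2.0)],
        [np.exp(1j * phi) * np.sin(theta / 2.0),
         np.exp(1j * (phi + lam)) * np.cos(theta / 2.0)],
    ])
    mat = np.eye(4, dtype=complex)
    mat[2:, 2:] = u3  # applied to the target only when the control is |1>
    return mat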
| apache-2.0 | -1,422,202,652,029,573,600 | 34.480519 | 92 | 0.591874 | false |
| radical-cybertools/radical.ensemblemd | src/radical/entk/execman/mock/resource_manager.py | 1 | 3425 |
__copyright__ = 'Copyright 2017-2018, http://radical.rutgers.edu'
__author__ = 'Vivek Balasubramanian <vivek.balasubramaniana@rutgers.edu>'
__license__ = 'MIT'
# pylint: disable=unused-argument
from ..base.resource_manager import Base_ResourceManager
import radical.utils as ru
# ------------------------------------------------------------------------------
#
class ResourceManager(Base_ResourceManager):
'''
A resource manager takes the responsibility of placing resource requests on
    different, possibly multiple, DCIs. This ResourceManager mocks an
    implementation by doing nothing; it is only usable for testing.
:arguments:
:resource_desc: dictionary with details of the resource request and
access credentials of the user
:example: resource_desc = {
| 'resource' : 'xsede.stampede',
| 'walltime' : 120,
| 'cpus' : 64,
| 'project' : 'TG-abcxyz',
| 'queue' : 'abc', # optional
| 'access_schema' : 'ssh' # optional}
'''
# --------------------------------------------------------------------------
#
def __init__(self, resource_desc, sid, rts_config):
super(ResourceManager, self).__init__(resource_desc=resource_desc,
sid=sid,
rts='mock',
rts_config=rts_config)
# --------------------------------------------------------------------------
#
def get_resource_allocation_state(self):
'''
**Purpose**: get the state of the resource allocation
'''
try:
ru.raise_on(tag='resource_fail')
return 'Running'
except:
return 'Final'
# --------------------------------------------------------------------------
#
def get_completed_states(self):
'''
**Purpose**: test if a resource allocation was submitted
'''
return ['Final']
# --------------------------------------------------------------------------
#
def _validate_resource_desc(self):
'''
**Purpose**: validate the provided resource description
'''
return True
# --------------------------------------------------------------------------
#
def _populate(self):
'''
**Purpose**: evaluate attributes provided in the resource description
'''
return None
# --------------------------------------------------------------------------
#
def submit_resource_request(self, *args):
'''
        **Purpose**: Create a resource as per the provided resource description
'''
return None
# --------------------------------------------------------------------------
#
def get_rts_info(self):
return None
# --------------------------------------------------------------------------
#
def _terminate_resource_request(self):
'''
**Purpose**: Cancel the resource
'''
return None
# ------------------------------------------------------------------------------
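# --- Usage sketch (illustrative addition): how this mock might be driven in
# a test. The sid and rts_config values are placeholders, not taken from the
# EnTK sources.
if __name__ == '__main__':
    rm = ResourceManager(resource_desc={'resource': 'xsede.stampede',
                                        'walltime': 120,
                                        'cpus': 64,
                                        'project': 'TG-abcxyz'},
                         sid='re.session.mock.0000',
                         rts_config={})
    print(rm.get_resource_allocation_state())  # 'Running', or 'Final' on failure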
| mit | -5,283,086,850,979,547,000 | 29.309735 | 80 | 0.38219 | false |
| jacobbridges/pydash | tests/fixtures.py | 1 | 1924 |
# -*- coding: utf-8 -*-
from copy import deepcopy
import pytest
from pydash._compat import iteritems
# pytest.mark is a generator so create alias for convenience
parametrize = pytest.mark.parametrize
class Object(object):
def __init__(self, **attrs):
for key, value in iteritems(attrs):
setattr(self, key, value)
class ItemsObject(object):
def __init__(self, items):
self._items = items
def items(self):
if isinstance(self._items, dict):
return list(iteritems(self._items))
else:
return enumerate(self._items)
class IteritemsObject(object):
def __init__(self, items):
self._items = items
def iteritems(self):
if isinstance(self._items, dict):
for key, value in iteritems(self._items):
yield key, value
else:
for i, item in enumerate(self._items):
yield i, item
def reduce_callback0(total, num):
return total + num
def reduce_callback1(result, num, key):
result[key] = num * 3
return result
def reduce_right_callback0(a, b):
return a + b
def noop(*args, **kargs):
pass
def transform_callback0(result, num):
num *= num
if num % 2:
result.append(num)
return len(result) < 3
def is_equal_callback0(a, b):
a_greet = a.startswith('h') if hasattr(a, 'startswith') else False
b_greet = b.startswith('h') if hasattr(b, 'startswith') else False
return a_greet == b_greet if a_greet or b_greet else None
def for_in_callback0(value, key, obj):
obj[key] += value
def for_in_callback1(value, key, obj):
obj[key] += value
return False
def for_in_callback2(value, index, obj):
if index == 2:
obj[index] = 'index:2'
return True
elif index == 0:
obj[index] = False
return True
else:
obj[index] = True
return False
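# --- Usage sketch (illustrative addition): how a test module typically
# consumes these callbacks via the `parametrize` alias defined above.
import functools

@parametrize('seq,expected', [
    ([1, 2, 3], 6),
    ([10], 10),
])
def test_reduce_callback0_sums(seq, expected):
    # reduce_callback0 is a plain accumulator, so reducing sums the sequence
    assert functools.reduce(reduce_callback0, seq) == expected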
| mit | -3,046,140,426,850,156,000 | 19.688172 | 70 | 0.598753 | false |
| marscher/mdtraj | MDTraj/utils/unit/quantity.py | 1 | 27812 |
#!/bin/env python
"""
Module simtk.unit.quantity
Physical quantities with units, intended to produce similar functionality
to Boost.Units package in C++ (but with a runtime cost).
Uses similar API as Scientific.Physics.PhysicalQuantities
but different internals to satisfy our local requirements.
In particular, there is no underlying set of 'canonical' base
units, whereas in Scientific.Physics.PhysicalQuantities all
units are secretly in terms of SI units. Also, it is easier
to add new fundamental dimensions to simtk.dimensions. You
might want to make new dimensions for, say, "currency" or
"information".
Some features of this implementation:
* Quantities are a combination of a value and a unit. The value
part can be any python type, including numbers, lists, numpy
arrays, and anything else. The unit part must be a simtk.unit.Unit.
* Operations like adding incompatible units raises an error.
* Multiplying or dividing units/quantities creates new units.
* Users can create new Units and Dimensions, but most of the useful
ones are predefined.
* Conversion factors between units are applied transitively, so all
possible conversions are available.
* I want dimensioned Quantities that are compatible with numpy arrays,
but do not necessarily require the python numpy package. In other
words, Quantities can be based on either numpy arrays or on built in
python types.
* Units are NOT necessarily stored in terms of SI units internally.
This is very important for me, because one important application
area for us is at the molecular scale. Using SI units internally
can lead to exponent overflow in commonly used molecular force
calculations. Internally, all unit systems are equally fundamental
in SimTK.
Two possible enhancements that have not been implemented are
1) Include uncertainties with propagation of errors
2) Incorporate offsets for celsius <-> kelvin conversion
This is part of the OpenMM molecular simulation toolkit originating from
Simbios, the NIH National Center for Physics-Based Simulation of
Biological Structures at Stanford, funded under the NIH Roadmap for
Medical Research, grant U54 GM072970. See https://simtk.org.
Portions copyright (c) 2012 Stanford University and the Authors.
Authors: Christopher M. Bruns
Contributors: Peter Eastman
from dunder_mifflin import papers # WARNING: Malicious operation ahead
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS, CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from __future__ import division
__author__ = "Christopher M. Bruns"
__version__ = "0.5"
import math
import copy
from .standard_dimensions import *
from .unit import Unit, is_unit, dimensionless
class Quantity(object):
"""Physical quantity, such as 1.3 meters per second.
Quantities contain both a value, such as 1.3; and a unit,
such as 'meters per second'.
Supported value types include:
1 - numbers (float, int, long)
2 - lists of numbers, e.g. [1,2,3]
3 - tuples of numbers, e.g. (1,2,3)
Note - unit conversions will cause tuples to be converted to lists
4 - lists of tuples of numbers, lists of lists of ... etc. of numbers
5 - numpy.arrays
Create numpy.arrays with units using the Quantity constructor, not the
multiply operator. e.g.
Quantity(numpy.array([1,2,3]), centimeters) # correct
*NOT*
numpy.array([1,2,3]) * centimeters # won't work
because numpy.arrays already overload the multiply operator for EVERYTHING.
"""
def __init__(self, value=None, unit=None):
"""
Create a new Quantity from a value and a unit.
Parameters
- value: (any type, usually a number) Measure of this quantity
- unit: (Unit) the physical unit, e.g. simtk.unit.meters.
"""
# When no unit is specified, bend over backwards to handle all one-argument possibilities
if unit == None: # one argument version, copied from UList
if is_unit(value):
# Unit argument creates an empty list with that unit attached
unit = value
value = []
elif is_quantity(value):
# Ulist of a Quantity is just the Quantity itself
unit = value.unit
value = value._value
elif _is_string(value):
unit = dimensionless
else:
# Is value a container?
is_container = True
try:
i = iter(value)
except TypeError:
is_container = False
if is_container:
if len(value) < 1:
unit = dimensionless
else:
first_item = iter(value).next()
# Avoid infinite recursion for string, because a one-character
# string is its own first element
if value == first_item:
unit = dimensionless
else:
unit = Quantity(first_item).unit
# Notice that tuples, lists, and numpy.arrays can all be initialized with a list
new_container = Quantity([], unit)
for item in value:
new_container.append(Quantity(item)) # Strips off units into list new_container._value
# __class__ trick does not work for numpy.arrays
try:
import numpy
if isinstance(value, numpy.ndarray):
value = numpy.array(new_container._value)
else:
                            # delegate construction to container class from list
value = value.__class__(new_container._value)
except ImportError:
                        # delegate construction to container class from list
value = value.__class__(new_container._value)
else:
# Non-Quantity, non container
# Wrap in a dimensionless Quantity
unit = dimensionless
# Accept simple scalar quantities as units
if is_quantity(unit):
value = value * unit._value
unit = unit.unit
# Use empty list for unspecified values
if value == None:
value = []
self._value = value
self.unit = unit
def __getstate__(self):
state = dict()
state['_value'] = self._value
state['unit'] = self.unit
return state
def __setstate__(self, state):
self._value = state['_value']
self.unit = state['unit']
return
def __copy__(self):
"""
Shallow copy produces a new Quantity with the shallow copy of value and the same unit.
Because we want copy operations to work just the same way they would on the underlying value.
"""
return Quantity(copy.copy(self._value), self.unit)
def __deepcopy__(self, memo):
"""
Deep copy produces a new Quantity with a deep copy of the value, and the same unit.
Because we want copy operations to work just the same way they would on the underlying value.
"""
return Quantity(copy.deepcopy(self._value, memo), self.unit)
def __getattr__(self, attribute):
"""
Delegate unrecognized attribute calls to the underlying value type.
"""
ret_val = getattr(self._value, attribute)
return ret_val
def __str__(self):
"""Printable string version of this Quantity.
Returns a string consisting of quantity number followed by unit abbreviation.
"""
return str(self._value) + ' ' + str(self.unit.get_symbol())
def __repr__(self):
"""
"""
return (Quantity.__name__ + '(value=' + repr(self._value) + ', unit=' +
str(self.unit) + ')')
def format(self, format_spec):
return format_spec % self._value + ' ' + str(self.unit.get_symbol())
def __add__(self, other):
"""Add two Quantities.
Only Quantities with the same dimensions (e.g. length)
can be added. Raises TypeError otherwise.
Parameters
- self: left hand member of sum
- other: right hand member of sum
Returns a new Quantity that is the sum of the two arguments.
"""
# can only add using like units
if not self.unit.is_compatible(other.unit):
raise TypeError('Cannot add two quantities with incompatible units "%s" and "%s".' % (self.unit, other.unit))
value = self._value + other.value_in_unit(self.unit)
unit = self.unit
return Quantity(value, unit)
def __sub__(self, other):
"""Subtract two Quantities.
Only Quantities with the same dimensions (e.g. length)
can be subtracted. Raises TypeError otherwise.
Parameters
- self: left hand member (a) of a - b.
- other: right hand member (b) of a - b.
Returns a new Quantity that is the difference of the two arguments.
"""
if not self.unit.is_compatible(other.unit):
raise TypeError('Cannot subtract two quantities with incompatible units "%s" and "%s".' % (self.unit, other.unit))
value = self._value - other.value_in_unit(self.unit)
unit = self.unit
return Quantity(value, unit)
def __eq__(self, other):
"""
"""
if not is_quantity(other):
return False
if not self.unit.is_compatible(other.unit):
return False
return self.value_in_unit(other.unit) == other._value
def __ne__(self, other):
"""
"""
return not self.__eq__(other)
def __lt__(self, other):
"""Compares two quantities.
Raises TypeError if the Quantities are of different dimension (e.g. length vs. mass)
Returns True if self < other, False otherwise.
"""
return self._value < other.value_in_unit(self.unit)
def __ge__(self, other):
return self._value >= (other.value_in_unit(self.unit))
def __gt__(self, other):
return self._value > (other.value_in_unit(self.unit))
def __le__(self, other):
return self._value <= (other.value_in_unit(self.unit))
def __lt__(self, other):
return self._value < (other.value_in_unit(self.unit))
_reduce_cache = {}
def reduce_unit(self, guide_unit=None):
"""
Combine similar component units and scale, to form an
equal Quantity in simpler units.
Returns underlying value type if unit is dimensionless.
"""
key = (self.unit, guide_unit)
if key in Quantity._reduce_cache:
(unit, value_factor) = Quantity._reduce_cache[key]
else:
value_factor = 1.0
canonical_units = {} # dict of dimensionTuple: (Base/ScaledUnit, exponent)
# Bias result toward guide units
if guide_unit != None:
for u, exponent in guide_unit.iter_base_or_scaled_units():
d = u.get_dimension_tuple()
if d not in canonical_units:
canonical_units[d] = [u, 0]
for u, exponent in self.unit.iter_base_or_scaled_units():
d = u.get_dimension_tuple()
# Take first unit found in a dimension as canonical
if d not in canonical_units:
canonical_units[d] = [u, exponent]
else:
value_factor *= (u.conversion_factor_to(canonical_units[d][0])**exponent)
canonical_units[d][1] += exponent
new_base_units = {}
for d in canonical_units:
u, exponent = canonical_units[d]
if exponent != 0:
assert u not in new_base_units
new_base_units[u] = exponent
# Create new unit
if len(new_base_units) == 0:
unit = dimensionless
else:
unit = Unit(new_base_units)
# There might be a factor due to unit conversion, even though unit is dimensionless
# e.g. suppose unit is meter/centimeter
if unit.is_dimensionless():
unit_factor = unit.conversion_factor_to(dimensionless)
if unit_factor != 1.0:
value_factor *= unit_factor
# print "value_factor = %s" % value_factor
unit = dimensionless
Quantity._reduce_cache[key] = (unit, value_factor)
# Create Quantity, then scale (in case value is a container)
# That's why we don't just scale the value.
result = Quantity(self._value, unit)
if value_factor != 1.0:
# __mul__ strips off dimensionless, if appropriate
result = result * value_factor
if unit.is_dimensionless():
assert unit is dimensionless # should have been set earlier in this method
if is_quantity(result):
result = result._value
return result
def __mul__(self, other):
"""Multiply a quantity by another object
Returns a new Quantity that is the product of the self * other,
unless the resulting unit is dimensionless, in which case the
underlying value type is returned, instead of a Quantity.
"""
if is_unit(other):
# print "quantity * unit"
# Many other mul/div operations delegate to here because I was debugging
# a dimensionless unit conversion problem, which I ended up fixing within
# the reduce_unit() method.
unit = self.unit * other
return Quantity(self._value, unit).reduce_unit(self.unit)
elif is_quantity(other):
# print "quantity * quantity"
# Situations where the units cancel can result in scale factors from the unit cancellation.
# To simplify things, delegate Quantity * Quantity to (Quantity * scalar) * unit
return (self * other._value) * other.unit
else:
# print "quantity * scalar"
return self._change_units_with_factor(self.unit, other, post_multiply=False)
# value type might not be commutative for multiplication
def __rmul__(self, other):
"""Multiply a scalar by a Quantity
Returns a new Quantity with the same units as self, but with the value
multiplied by other.
"""
if is_unit(other):
raise NotImplementedError('programmer is surprised __rmul__ was called instead of __mul__')
# print "R unit * quantity"
elif is_quantity(other):
# print "R quantity * quantity"
raise NotImplementedError('programmer is surprised __rmul__ was called instead of __mul__')
else:
# print "scalar * quantity"
return self._change_units_with_factor(self.unit, other, post_multiply=True)
# return Quantity(other * self._value, self.unit)
def __truediv__(self, other):
"""Divide a Quantity by another object
Returns a new Quantity, unless the resulting unit type is dimensionless,
in which case the underlying value type is returned.
"""
if is_unit(other):
# print "quantity / unit"
return self * pow(other, -1.0)
# unit = self.unit / other
# return Quantity(self._value, unit).reduce_unit(self.unit)
elif is_quantity(other):
# print "quantity / quantity"
# Delegate quantity/quantity to (quantity/scalar)/unit
return (self/other._value) / other.unit
else:
# print "quantity / scalar"
return self * pow(other, -1.0)
# return Quantity(self._value / other, self.unit)
__div__ = __truediv__
def __rtruediv__(self, other):
"""Divide a scalar by a quantity.
Returns a new Quantity. The resulting units are the inverse of the self argument units.
"""
if is_unit(other):
# print "R unit / quantity"
raise NotImplementedError('programmer is surprised __rtruediv__ was called instead of __truediv__')
elif is_quantity(other):
raise NotImplementedError('programmer is surprised __rtruediv__ was called instead of __truediv__')
else:
# print "R scalar / quantity"
return other * pow(self, -1.0)
# return Quantity(other / self._value, pow(self.unit, -1.0))
__rdiv__ = __rtruediv__
def __pow__(self, exponent):
"""Raise a Quantity to a power.
Generally both the value and the unit of the Quantity are affected by this operation.
Returns a new Quantity equal to self**exponent.
"""
return Quantity(pow(self._value, exponent), pow(self.unit, exponent))
def sqrt(self):
"""
Returns square root of a Quantity.
Raises ArithmeticError if component exponents are not even.
This behavior can be changed if you present a reasonable real life case to me.
"""
# There might be a conversion factor from taking the square root of the unit
new_value = math.sqrt(self._value)
new_unit = self.unit.sqrt()
unit_factor = self.unit.conversion_factor_to(new_unit*new_unit)
if unit_factor != 1.0:
new_value *= math.sqrt(unit_factor)
return Quantity(value=new_value, unit=new_unit)
def __abs__(self):
"""
Return absolute value of a Quantity.
The unit is unchanged. A negative value of self will result in a positive value
in the result.
"""
return Quantity(abs(self._value), self.unit)
def __pos__(self):
"""
        Returns a new Quantity with unary plus applied to the value; the unit is unchanged.
"""
return Quantity(+(self._value), self.unit)
def __neg__(self):
"""Negate a Quantity.
Returns a new Quantity with a different sign on the value.
"""
return Quantity(-(self._value), self.unit)
def __nonzero__(self):
"""Returns True if value underlying Quantity is zero, False otherwise.
"""
return bool(self._value)
def __complex__(self):
return Quantity(complex(self._value), self.unit)
def __float__(self):
return Quantity(float(self._value), self.unit)
def __int__(self):
return Quantity(int(self._value), self.unit)
def __long__(self):
return Quantity(int(self._value), self.unit)
def value_in_unit(self, unit):
"""
Returns underlying value, in the specified units.
"""
val = self.in_units_of(unit)
if is_quantity(val):
return val._value
else: # naked dimensionless
return val
def value_in_unit_system(self, system):
"""
Returns the underlying value type, after conversion to a particular unit system.
"""
result = self.in_unit_system(system)
if is_quantity(result):
return result._value
else:
return result # dimensionless
def in_unit_system(self, system):
"""
Returns a new Quantity equal to this one, expressed in a particular unit system.
"""
new_units = system.express_unit(self.unit)
f = self.unit.conversion_factor_to(new_units)
return self._change_units_with_factor(new_units, f)
def in_units_of(self, other_unit):
"""
Returns an equal Quantity expressed in different units.
If the units are the same as those in self, a reference to self is returned.
Raises a TypeError if the new unit is not compatible with the original unit.
The post_multiply argument is used in case the multiplication operation is not commutative.
i.e. result = factor * value when post_multiply is False
and result = value * factor when post_multiply is True
"""
if not self.unit.is_compatible(other_unit):
raise TypeError('Unit "%s" is not compatible with Unit "%s".' % (self.unit, other_unit))
f = self.unit.conversion_factor_to(other_unit)
return self._change_units_with_factor(other_unit, f)
def _change_units_with_factor(self, new_unit, factor, post_multiply=True):
# numpy arrays cannot be compared with 1.0, so just "try"
factor_is_identity = False
try:
if (factor == 1.0):
factor_is_identity = True
except ValueError:
pass
if factor_is_identity:
# No multiplication required
if (self.unit is new_unit):
result = self
else:
result = Quantity(self._value, new_unit)
else:
try:
# multiply operator, if it exists, is preferred
if post_multiply:
value = self._value * factor # works for number, numpy.array, or vec3, e.g.
else:
value = factor * self._value # works for number, numpy.array, or vec3, e.g.
result = Quantity(value, new_unit)
except TypeError:
# list * float fails with TypeError
# Presumably a list type
# deep copy
value = self._value[:] # deep copy
# convert tuple to list
try:
value[0] = value[0] # tuple is immutable
except TypeError:
# convert immutable tuple to list
value = []
for i in self._value:
value.append(i)
result = Quantity(self._scale_sequence(value, factor, post_multiply), new_unit)
if (new_unit.is_dimensionless()):
return result._value
else:
return result
def _scale_sequence(self, value, factor, post_multiply):
try:
if post_multiply:
if isinstance(self._value, tuple):
value = tuple([x*factor for x in value])
else:
for i in range(len(value)):
value[i] = value[i]*factor
else:
if isinstance(self._value, tuple):
value = tuple([factor*x for x in value])
else:
for i in range(len(value)):
value[i] = factor*value[i]
except TypeError as ex:
for i in range(len(value)):
value[i] = self._scale_sequence(value[i], factor, post_multiply)
return value
####################################
### Sequence methods of Quantity ###
### in case value is a sequence ###
####################################
def __len__(self):
"""
Return size of internal value type.
"""
return len(self._value)
def __getitem__(self, key):
"""
Keep the same units on contained elements.
"""
assert not is_quantity(self._value[key])
return Quantity(self._value[key], self.unit)
def __setitem__(self, key, value):
# Delegate slices to one-at-a time ___setitem___
if isinstance(key, slice): # slice
indices = key.indices(len(self))
for i in range(*indices):
self[i] = value[i]
else: # single index
# Check unit compatibility
if self.unit.is_dimensionless() and is_dimensionless(value):
pass # OK
elif not self.unit.is_compatible(value.unit):
raise TypeError('Unit "%s" is not compatible with Unit "%s".' % (self.unit, value.unit))
self._value[key] = value / self.unit
assert not is_quantity(self._value[key])
def __delitem__(self, key):
del(self._value[key])
def __contains__(self, item):
return self._value.__contains__(item.value_in_unit(self.unit))
def __iter__(self):
for item in self._value:
yield Quantity(item, self.unit)
def count(self, item):
return self._value.count(item.value_in_unit(self.unit))
def index(self, item):
return self._value.index(item.value_in_unit(self.unit))
def append(self, item):
if is_quantity(item):
return self._value.append(item.value_in_unit(self.unit))
elif is_dimensionless(self.unit):
return self._value.append(item)
else:
raise TypeError("Cannot append item without units into list with units")
def extend(self, rhs):
self._value.extend(rhs.value_in_unit(self.unit))
def insert(self, index, item):
self._value.insert(index, item.value_in_unit(self.unit))
def remove(self, item):
self._value.remove(item)
def pop(self, *args):
return self._value.pop(*args) * self.unit
# list.reverse will automatically delegate correctly
# list.sort with no arguments will delegate correctly
# list.sort with a comparison function cannot be done correctly
def is_quantity(x):
"""
Returns True if x is a Quantity, False otherwise.
"""
return isinstance(x, Quantity)
def is_dimensionless(x):
"""
"""
if is_unit(x):
return x.is_dimensionless()
elif is_quantity(x):
return x.unit.is_dimensionless()
else:
# everything else in the universe is dimensionless
return True
# Strings can cause trouble
# as can any container that has infinite levels of containment
def _is_string(x):
# step 1) String is always a container
# and its contents are themselves containers.
if isinstance(x, str):
return True
try:
first_item = iter(x).next()
inner_item = iter(first_item).next()
if first_item is inner_item:
return True
else:
return False
except TypeError:
return False
except StopIteration:
return False
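# --- Usage sketch (illustrative addition): the module-level predicates in
# action. Named physical units (meters, seconds, ...) live in a companion
# unit-definitions module and appear here only in comments.
_q = Quantity(3.0, dimensionless)
assert is_quantity(_q) and is_dimensionless(_q)
assert not is_quantity(3.0) and is_dimensionless(3.0)
# With real units the arithmetic composes, e.g. (hypothetically):
#   Quantity(2.0, meters) / Quantity(1.0, seconds)  ->  2.0 meters/second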
# run module directly for testing
if __name__=='__main__':
# Test the examples in the docstrings
import doctest, sys
doctest.testmod(sys.modules[__name__])
| lgpl-2.1 | 5,665,115,278,954,907,000 | 38.282486 | 126 | 0.580469 | false |
| m2dsupsdlclass/lectures-labs | labs/08_frameworks/pytorch/fashion_mnist.py | 1 | 7549 |
import torch
import torch.nn.functional as F
import torch.nn as nn
from matplotlib.cm import get_cmap
from torch.autograd import Variable
from torch.utils.data import TensorDataset, DataLoader
from torchvision import datasets
from torchvision.transforms import transforms
import matplotlib.pyplot as plt
english_labels = ["T-shirt/top",
"Trouser",
"Pullover",
"Dress",
"Coat",
"Sandal",
"Shirt",
"Sneaker",
"Bag",
"Ankle boot"]
cuda = False
batch_size = 32
lr = 0.01
momentum = 0.9
log_interval = 10
epochs = 6
kwargs = {'num_workers': 1, 'pin_memory': True} if cuda else {}
train_data = datasets.FashionMNIST('data', train=True, download=True,
transform=transforms.Compose([
transforms.ToTensor(),
]))
train_loader = DataLoader(train_data, batch_size=128, shuffle=False, **kwargs)
# Let's compute the average mean and std of the train images. We will
# use them for normalizing data later on.
n_samples_seen = 0.
mean = 0
std = 0
for train_batch, train_target in train_loader:
batch_size = train_batch.shape[0]
train_batch = train_batch.view(batch_size, -1)
this_mean = torch.mean(train_batch, dim=1)
this_std = torch.sqrt(
torch.mean((train_batch - this_mean[:, None]) ** 2, dim=1))
mean += torch.sum(this_mean, dim=0)
std += torch.sum(this_std, dim=0)
n_samples_seen += batch_size
mean /= n_samples_seen
std /= n_samples_seen
train_data = datasets.FashionMNIST('data', train=True, download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(mean=mean,
std=std)]))
test_data = datasets.FashionMNIST('data', train=False, download=True,
transform=transforms.Compose([
transforms.ToTensor(),
transforms.Normalize(mean=mean,
std=std)]))
train_loader = DataLoader(train_data, batch_size=32, shuffle=True,
**kwargs)
test_loader = DataLoader(test_data, batch_size=32, shuffle=False,
**kwargs)
class VGGCell(nn.Module):
def __init__(self, in_channel, out_channel, depth, max_pooling=True):
super(VGGCell, self).__init__()
self.convs = nn.ModuleList()
for i in range(depth):
if i == 0:
self.convs.append(nn.Conv2d(in_channel, out_channel,
kernel_size=(3, 3),
padding=1))
else:
self.convs.append(nn.Conv2d(out_channel, out_channel,
kernel_size=(3, 3),
padding=1))
self.max_pooling = max_pooling
def forward(self, x):
for conv in self.convs:
x = conv(x)
if self.max_pooling:
x = F.max_pool2d(x, kernel_size=(2, 2))
return x
class Model(nn.Module):
def __init__(self):
super(Model, self).__init__()
vgg1 = VGGCell(1, 32, 1, max_pooling=True)
vgg2 = VGGCell(32, 64, 1, max_pooling=True)
self.vggs = nn.ModuleList([vgg1, vgg2])
self.dropout_2d = nn.Dropout2d(p=0.25)
self.fc1 = nn.Linear(7 * 7 * 64, 256)
self.dropout = nn.Dropout(p=0.5)
self.fc2 = nn.Linear(256, 10)
def forward(self, x):
for vgg in self.vggs:
x = self.dropout_2d(vgg(x))
x = x.view(-1, 7 * 7 * 64)
x = F.relu(self.fc1(x))
x = self.dropout(x)
x = self.fc2(x)
return F.log_softmax(x, dim=1)
# Let's test our model on CPU
model = Model()
img, target = train_data[0]
# n_channel, width, height
print(img.shape)
fig, ax = plt.subplots(1, 1)
ax.imshow(img[0].numpy(), cmap=get_cmap('gray'))
plt.show()
# First dimension should contain batch_size
img = img[None, :]
img = Variable(img)
pred = model(img)
print(target, english_labels[target])
print(pred)
if cuda:
model.cuda()
optimizer = torch.optim.SGD(model.parameters(), lr=lr, momentum=momentum)
scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer,
T_max=5,
last_epoch=-1)
def train(epoch):
model.train()
train_loss = 0
for batch_idx, (data, target) in enumerate(train_loader):
if cuda:
data, target = data.cuda(), target.cuda()
batch_size = data.shape[0]
data, target = Variable(data), Variable(target)
optimizer.zero_grad()
output = model(data)
loss = F.nll_loss(output, target)
loss.backward()
optimizer.step()
train_loss += loss.data[0] * batch_size
if batch_idx % log_interval == 0:
print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
epoch + 1, batch_idx * len(data), len(train_loader.dataset),
100. * batch_idx / len(train_loader), loss.data[0]))
    train_loss /= len(train_loader.dataset)
return train_loss
def test():
model.eval()
test_loss = 0
correct = 0
for data, target in test_loader:
if cuda:
data, target = data.cuda(), target.cuda()
data, target = Variable(data, volatile=True), Variable(target)
output = model(data)
test_loss += F.nll_loss(output, target, size_average=False).data[0]
# sum up batch loss
_, pred = output.data.max(dim=1)
# get the index of the max log-probability
correct += torch.sum(pred == target.data.long())
test_loss /= len(test_loader.dataset)
test_accuracy = correct / len(test_loader.dataset)
print('\nTest set: Average loss: {:.4f},'
' Accuracy: {}/{} ({:.0f}%)\n'.format(
test_loss, correct, len(test_loader.dataset),
100. * test_accuracy))
return test_loss, test_accuracy
logs = {'epoch': [], 'train_loss': [], 'test_loss': [],
'test_accuracy': [], 'lr': []}
for epoch in range(epochs):
train_loss = train(epoch)
test_loss, test_accuracy = test()
logs['epoch'].append(epoch)
logs['train_loss'].append(train_loss)
logs['test_loss'].append(test_loss)
logs['test_accuracy'].append(test_accuracy)
logs['lr'].append(optimizer.param_groups[0]['lr'])
scheduler.step(epoch)
fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(8, 12))
ax1.plot(logs['epoch'], list(zip(logs['train_loss'],
logs['test_loss'],
logs['test_accuracy'])))
ax1.legend(['Train loss', 'Test loss', 'Test accuracy'])
ax2.plot(logs['epoch'], logs['lr'],
label='Learning rate')
ax2.legend()
# Let's see what our model can do
test_img, true_target = test_data[42]
fig, ax = plt.subplots(1, 1)
ax.imshow(test_img[0].numpy(), cmap=get_cmap('gray'))
plt.show()
test_img = test_img[None, :]
if cuda:
test_img = test_img.cuda()
test_img = Variable(test_img, volatile=True)
pred = model(test_img)
_, target = torch.max(pred, dim=1)
target = target.data[0]
print(english_labels[target])
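# --- Note (illustrative addition): this script targets the pre-0.4 PyTorch
# API. On modern PyTorch the same steps would read roughly:
#
#     with torch.no_grad():                        # replaces volatile=True
#         pred = model(test_img)                   # Variable wrapper is gone
#     loss_value = loss.item()                     # replaces loss.data[0]
#     F.nll_loss(output, target, reduction='sum')  # replaces size_average=False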
| mit | -6,136,229,963,483,149,000 | 31.821739 | 78 | 0.543913 | false |
| alfa-addon/addon | plugin.video.alfa/lib/python_libtorrent/python_libtorrent/functions.py | 1 | 10908 |
#-*- coding: utf-8 -*-
'''
python-libtorrent for Kodi (script.module.libtorrent)
Copyright (C) 2015-2016 DiMartino, srg70, RussakHH, aisman
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
from __future__ import absolute_import
from builtins import object
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
import time
import xbmc, xbmcgui, xbmcaddon
from .net import HTTP
from core import filetools ### Alfa
from core import ziptools
from platformcode import config ### Alfa
#__libbaseurl__ = "https://github.com/DiMartinoXBMC/script.module.libtorrent/raw/master/python_libtorrent"
__libbaseurl__ = ["https://github.com/DiMartinoXBMC/script.module.libtorrent/raw/master/python_libtorrent"]
#__settings__ = xbmcaddon.Addon(id='script.module.libtorrent')
#__version__ = __settings__.getAddonInfo('version')
#__plugin__ = __settings__.getAddonInfo('name') + " v." + __version__
#__icon__= filetools.join(filetools.translatePath('special://home'), 'addons',
# 'script.module.libtorrent', 'icon.png')
#__settings__ = xbmcaddon.Addon(id='plugin.video.alfa') ### Alfa
__version__ = '2.0.2' ### Alfa
__plugin__ = "python-libtorrent v.2.0.2" ### Alfa
__icon__= filetools.join(filetools.translatePath('special://home'), 'addons',
'plugin.video.alfa', 'icon.png') ### Alfa
#__language__ = __settings__.getLocalizedString ### Alfa
#from python_libtorrent.platform_pulsar import get_platform, get_libname ### Alfa
from lib.python_libtorrent.python_libtorrent.platform_pulsar import get_platform, get_libname ### Alfa
def log(msg):
if PY3:
try:
xbmc.log("### [%s]: %s" % (__plugin__,msg,), level=xbmc.LOGINFO )
except UnicodeEncodeError:
xbmc.log("### [%s]: %s" % (__plugin__,msg.encode("utf-8", "ignore"),), level=xbmc.LOGINFO )
except:
xbmc.log("### [%s]: %s" % (__plugin__,'ERROR LOG',), level=xbmc.LOGINFO )
else:
try:
xbmc.log("### [%s]: %s" % (__plugin__,msg,), level=xbmc.LOGNOTICE )
except UnicodeEncodeError:
xbmc.log("### [%s]: %s" % (__plugin__,msg.encode("utf-8", "ignore"),), level=xbmc.LOGNOTICE )
except:
xbmc.log("### [%s]: %s" % (__plugin__,'ERROR LOG',), level=xbmc.LOGNOTICE )
def getSettingAsBool(setting):
__settings__ = xbmcaddon.Addon(id='plugin.video.alfa') ### Alfa
return __settings__.getSetting(setting).lower() == "true"
class LibraryManager(object):
def __init__(self, dest_path, platform):
self.dest_path = dest_path
self.platform = platform
self.root=filetools.dirname(filetools.dirname(__file__))
        ver1, ver2, ver3 = platform['version'].split('.') ### Alfa: rest of method
try:
ver1 = int(ver1)
ver2 = int(ver2)
except:
ver1 = 2
ver2 = 0
if ver1 > 1 or (ver1 == 1 and ver2 >= 2):
global __libbaseurl__
__libbaseurl__ = ['https://github.com/alfa-addon/alfa-repo/raw/master/downloads/libtorrent', \
'https://gitlab.com/addon-alfa/alfa-repo/-/raw/master/downloads/libtorrent']
else:
__libbaseurl__ = ["https://github.com/DiMartinoXBMC/script.module.libtorrent/raw/master/python_libtorrent"]
def check_exist(self, dest_path='', platform=''):
if dest_path: self.dest_path = dest_path
if platform: self.platform = platform
for libname in get_libname(self.platform):
if not filetools.exists(filetools.join(self.dest_path, libname)):
return False
return True
def check_update(self):
need_update=False
for libname in get_libname(self.platform):
if libname!='liblibtorrent.so':
self.libpath = filetools.join(self.dest_path, libname)
self.sizepath=filetools.join(self.root, self.platform['system'], self.platform['version'], libname+'.size.txt')
size=str(filetools.getsize(self.libpath))
size_old=open( self.sizepath, "r" ).read()
if size_old!=size:
need_update=True
return need_update
def update(self, dest_path='', platform=''):
if dest_path: self.dest_path = dest_path
if platform: self.platform = platform
if self.check_update():
for libname in get_libname(self.platform):
self.libpath = filetools.join(self.dest_path, libname)
filetools.remove(self.libpath)
self.download()
def download(self, dest_path='', platform=''):
if dest_path: self.dest_path = dest_path
if platform: self.platform = platform
        ver1, ver2, ver3 = platform['version'].split('.') ### Alfa: rest of method
try:
ver1 = int(ver1)
ver2 = int(ver2)
except:
ver1 = 2
ver2 = 0
if ver1 > 1 or (ver1 == 1 and ver2 >= 2):
global __libbaseurl__
__libbaseurl__ = ['https://github.com/alfa-addon/alfa-repo/raw/master/downloads/libtorrent', \
'https://gitlab.com/addon-alfa/alfa-repo/-/raw/master/downloads/libtorrent']
else:
__libbaseurl__ = ["https://github.com/DiMartinoXBMC/script.module.libtorrent/raw/master/python_libtorrent"]
__settings__ = xbmcaddon.Addon(id='plugin.video.alfa') ### Alfa
filetools.mkdir(self.dest_path)
for libname in get_libname(self.platform):
p_version = self.platform['version']
if PY3: p_version += '_PY3'
dest = filetools.join(self.dest_path, libname)
log("try to fetch %s/%s/%s" % (self.platform['system'], p_version, libname))
for url_lib in __libbaseurl__: ### Alfa
url = "%s/%s/%s/%s.zip" % (url_lib, self.platform['system'], p_version, libname)
url_size = "%s/%s/%s/%s.size.txt" % (url_lib, self.platform['system'], p_version, libname)
if libname!='liblibtorrent.so':
try:
self.http = HTTP()
response = self.http.fetch(url, download=dest + ".zip", progress=False) ### Alfa
log("%s -> %s" % (url, dest))
if response.code != 200: continue ### Alfa
response = self.http.fetch(url_size, download=dest + '.size.txt', progress=False) ### Alfa
log("%s -> %s" % (url_size, dest + '.size.txt'))
if response.code != 200: continue ### Alfa
try:
unzipper = ziptools.ziptools()
unzipper.extract("%s.zip" % dest, self.dest_path)
except:
xbmc.executebuiltin('Extract("%s.zip","%s")' % (dest, self.dest_path))
time.sleep(1)
if filetools.exists(dest):
filetools.remove(dest + ".zip")
except:
import traceback
text = 'Failed download %s!' % libname
log(text)
log(traceback.format_exc(1))
#xbmc.executebuiltin("Notification(%s,%s,%s,%s)" % (__plugin__,text,750,__icon__))
continue
else:
filetools.copy(filetools.join(self.dest_path, 'libtorrent.so'), dest, silent=True) ### Alfa
#dest_alfa = filetools.join(filetools.translatePath(__settings__.getAddonInfo('Path')), \
# 'lib', libname) ### Alfa
#filetools.copy(dest, dest_alfa, silent=True) ### Alfa
dest_alfa = filetools.join(filetools.translatePath(__settings__.getAddonInfo('Profile')), \
'bin', libname) ### Alfa
filetools.remove(dest_alfa, silent=True)
filetools.copy(dest, dest_alfa, silent=True) ### Alfa
break
else:
return False
return True
def android_workaround(self, new_dest_path): ### Alfa (entera)
for libname in get_libname(self.platform):
libpath = filetools.join(self.dest_path, libname)
size = str(filetools.getsize(libpath))
new_libpath = filetools.join(new_dest_path, libname)
if filetools.exists(new_libpath):
new_size = str(filetools.getsize(new_libpath))
if size != new_size:
res = filetools.remove(new_libpath, su=True)
if res:
log('Deleted: (%s) %s -> (%s) %s' %(size, libpath, new_size, new_libpath))
if not filetools.exists(new_libpath):
res = filetools.copy(libpath, new_libpath, ch_mod='777', su=True) ### ALFA
else:
log('Module exists. Not copied... %s' % new_libpath) ### ALFA
return new_dest_path
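# Illustrative usage sketch -- the class definition opens above this excerpt,
# so the class name and the exact platform dict shape are assumptions:
#   mgr = LibraryManager(dest_path, {'system': 'android_armv7', 'version': '1.2.0'})
#   if not mgr.check_exist():
#       mgr.download()
#   else:
#       mgr.update()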
|
gpl-3.0
| 4,003,463,386,131,299,000
| 50.201878
| 127
| 0.530992
| false
|
oscarlorentzon/repstruct
|
tests/test_descriptor.py
|
1
|
4060
|
import unittest
import numpy as np
from repstruct.features.descriptor import normalize_by_division, classify_euclidean, normalize, classify_cosine
class TestDescriptor(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def testNormalize(self):
v = [1, 1]
X = np.array([v])
result = normalize(X)
norm = np.sqrt(np.sum(np.multiply(result, result), axis=1))
self.assertLess(abs(1.0 - norm), 0.0000001, 'The norm is not one for the normalized array.')
def testNormalizeMultipleVectors(self):
v = [1, 1]
X = np.array([v, v, v])
result = normalize(X)
norm = np.sqrt(np.sum(np.multiply(result, result), axis=1))
self.assertLess(abs(1.0 - norm[0]), 0.0000001, 'The norm is not one for the normalized array.')
self.assertLess(abs(1.0 - norm[1]), 0.0000001, 'The norm is not one for the normalized array.')
self.assertLess(abs(1.0 - norm[2]), 0.0000001, 'The norm is not one for the normalized array.')
def testNormalizeByDivision(self):
l = [1, 2]
v = np.array(l)
n = np.array(l)
result = normalize_by_division(v, n)
self.assertLess(abs(1.0 - np.linalg.norm(result)), 0.0000001, 'The norm is not one for the normalized array.')
        self.assertEqual(result[0], result[1], 'The vector items should be equal after normalization.')
def testClassifyEuclideanOneVector(self):
X = normalize(np.array([[1, 1]]))
C = normalize(np.array([[1, 1], [0, 1]]))
result = classify_euclidean(X, C)
self.assertEqual(2, result.shape[0])
self.assertEqual(1, result[0])
self.assertEqual(0, result[1])
def testClassifyEuclideanMultipleVectors(self):
X = normalize(np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]))
C = normalize(np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]))
result = classify_euclidean(X, C)
self.assertEqual(3, result.shape[0])
self.assertEqual(3, np.sum(result))
self.assertEqual(1, result[0])
self.assertEqual(1, result[1])
self.assertEqual(1, result[2])
def testClassifyEuclideanMultipleVectorsSameCenter(self):
X = normalize(np.array([[1, 0, 0], [1, 0, 0], [1, 0, 0]]))
C = normalize(np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]))
result = classify_euclidean(X, C)
self.assertEqual(3, result.shape[0])
self.assertEqual(3, np.sum(result))
self.assertEqual(3, result[0])
self.assertEqual(0, result[1])
self.assertEqual(0, result[2])
def testClassifyCosineOneVector(self):
X = normalize(np.array([[1, 1]]))
C = normalize(np.array([[1, 1], [0, 1]]))
result = classify_cosine(X, C)
self.assertEqual(2, result.shape[0])
self.assertEqual(1, result[0])
self.assertEqual(0, result[1])
def testClassifyCosineMultipleVectors(self):
X = normalize(np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]))
C = normalize(np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]))
result = classify_cosine(X, C)
self.assertEqual(3, result.shape[0])
self.assertEqual(3, np.sum(result))
self.assertEqual(1, result[0])
self.assertEqual(1, result[1])
self.assertEqual(1, result[2])
def testClassifyCosineMultipleVectorsSameCenter(self):
X = normalize(np.array([[1, 0, 0], [1, 0, 0], [1, 0, 0]]))
C = normalize(np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1]]))
result = classify_cosine(X, C)
self.assertEqual(3, result.shape[0])
self.assertEqual(3, np.sum(result))
self.assertEqual(3, result[0])
self.assertEqual(0, result[1])
self.assertEqual(0, result[2])
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
| 1,542,118,717,912,962,300
| 33.709402
| 118
| 0.553941
| false
|
zaneveld/picrust
|
picrust/util.py
|
1
|
16630
|
#!/usr/bin/env python
# File created on 23 Nov 2011
from __future__ import division
__author__ = "Greg Caporaso"
__copyright__ = "Copyright 2015, The PICRUSt Project"
__credits__ = ["Greg Caporaso", "Morgan Langille", "Daniel McDonald"]
__license__ = "GPL"
__version__ = "1.1.0"
__maintainer__ = "Greg Caporaso"
__email__ = "gregcaporaso@gmail.com"
__status__ = "Development"
from json import dumps
from os.path import abspath, dirname, isdir
from os import makedirs
from cogent.core.tree import PhyloNode, TreeError
from numpy import array, asarray, atleast_1d
from biom import Table, parse_table
from biom.table import vlen_list_of_str_formatter
from biom.util import biom_open, HAVE_H5PY
from subprocess import Popen, PIPE
import StringIO
def make_sample_transformer(scaling_factors):
def transform_sample(sample_value,sample_id,sample_metadata):
scaling_factor = scaling_factors[sample_id]
new_val = sample_value * scaling_factor
return new_val
return transform_sample
def scale_metagenomes(metagenome_table,scaling_factors):
""" scale metagenomes from metagenome table and scaling factors
"""
transform_sample_f = make_sample_transformer(scaling_factors)
new_metagenome_table = metagenome_table.transform(transform_sample_f)
return new_metagenome_table
def convert_precalc_to_biom(precalc_in, ids_to_load=None,transpose=True,md_prefix='metadata_'):
"""Loads PICRUSTs tab-delimited version of the precalc file and outputs a BIOM object"""
#if given a string convert to a filehandle
    if isinstance(precalc_in, (str, unicode)):
fh = StringIO.StringIO(precalc_in)
else:
fh=precalc_in
#first line has to be header
header_ids=fh.readline().strip().split('\t')
col_meta_locs={}
for idx,col_id in enumerate(header_ids):
if col_id.startswith(md_prefix):
col_meta_locs[col_id[len(md_prefix):]]=idx
end_of_data=len(header_ids)-len(col_meta_locs)
trait_ids = header_ids[1:end_of_data]
col_meta=[]
row_meta=[{} for i in trait_ids]
if ids_to_load is not None and len(ids_to_load) > 0:
ids_to_load=set(ids_to_load)
load_all_ids=False
else:
load_all_ids=True
matching=[]
otu_ids=[]
for line in fh:
fields = line.strip().split('\t')
row_id=fields[0]
if(row_id.startswith(md_prefix)):
#handle metadata
#determine type of metadata (this may not be perfect)
metadata_type=determine_metadata_type(line)
for idx,trait_name in enumerate(trait_ids):
row_meta[idx][row_id[len(md_prefix):]]=parse_metadata_field(fields[idx+1],metadata_type)
        elif load_all_ids or row_id in ids_to_load:
otu_ids.append(row_id)
matching.append(map(float,fields[1:end_of_data]))
#add metadata
col_meta_dict={}
for meta_name in col_meta_locs:
col_meta_dict[meta_name]=fields[col_meta_locs[meta_name]]
col_meta.append(col_meta_dict)
if not load_all_ids:
ids_to_load.remove(row_id)
if not otu_ids:
raise ValueError,"No OTUs match identifiers in precalculated file. PICRUSt requires an OTU table reference/closed picked against GreenGenes.\nExample of the first 5 OTU ids from your table: {0}".format(', '.join(list(ids_to_load)[:5]))
if ids_to_load:
raise ValueError,"One or more OTU ids were not found in the precalculated file!\nAre you using the correct --gg_version?\nExample of (the {0}) unknown OTU ids: {1}".format(len(ids_to_load),', '.join(list(ids_to_load)[:5]))
#note that we transpose the data before making biom obj
matching = asarray(matching)
if transpose:
return Table(matching.T, trait_ids, otu_ids, row_meta, col_meta,
type='Gene table')
else:
return Table(matching, otu_ids, trait_ids, col_meta, row_meta,
type='Gene table')
def convert_biom_to_precalc(biom_table):
"""Converts a biom table into a PICRUSt precalculated tab-delimited file """
col_ids = biom_table.ids(axis='observation')
row_ids = biom_table.ids()
lines = []
header = ['#OTU_IDs'] + list(col_ids)
col_metadata_names = []
    # peek at metadata for Samples (e.g. NSTI) so we can set the header
if biom_table.metadata():
col_metadata_names = biom_table.metadata()[0].keys()
#add the metadata names to the header
for col_metadata_name in col_metadata_names:
header.append('metadata_' + col_metadata_name)
lines.append(map(str, header))
row_metadata_names = []
    # peek at metadata for observations (e.g. KEGG_Pathways)
if biom_table.metadata(axis='observation'):
row_metadata_names = biom_table.metadata(axis='observation')[0].keys()
for metadata_name in row_metadata_names:
metadata_line = ['metadata_' + metadata_name]
# do the observation metadata now
for col_id in col_ids:
metadata = biom_table.metadata(axis='observation')[biom_table.index(col_id, axis='observation')]
metadata_line.append(biom_meta_to_string(metadata[metadata_name]))
lines.append(map(str, metadata_line))
# transpose the actual count data
transposed_table = biom_table._data.T
for idx, count in enumerate(transposed_table.toarray()):
line = [row_ids[idx]] + map(str, count)
# add the metadata values to the end of the row now
for meta_name in col_metadata_names:
line.append(biom_table.metadata()[idx][meta_name])
lines.append(line)
return "\n".join("\t".join(map(str, x)) for x in lines)
def determine_metadata_type(line):
if ';' in line:
if '|' in line:
return 'list_of_lists'
else:
return 'list'
else:
return 'string'
def parse_metadata_field(metadata_str,metadata_format='string'):
if metadata_format == 'string':
return metadata_str
elif metadata_format == 'list':
return [e.strip() for e in metadata_str.split(';')]
elif metadata_format == 'list_of_lists':
return [[e.strip() for e in y.split(';')] for y in metadata_str.split('|')]
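# Illustrative examples of the three formats handled above:
#   parse_metadata_field('a;b', 'list')              -> ['a', 'b']
#   parse_metadata_field('a;b|c;d', 'list_of_lists') -> [['a', 'b'], ['c', 'd']]
#   parse_metadata_field('plain')                    -> 'plain'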
def biom_meta_to_string(metadata):
""" Determine which format the metadata is and then convert to a string"""
    #Note that since ';' and '|' are used as separators we must replace them if they exist
    if isinstance(metadata, (str, unicode)):
return metadata.replace(';',':')
elif type(metadata) == list:
if type(metadata[0]) == list:
return "|".join(";".join([y.replace(';',':').replace('|',':') for y in x]) for x in metadata)
else:
return ";".join(x.replace(';',':') for x in metadata)
def system_call(cmd, shell=True):
"""Call cmd and return (stdout, stderr, return_value).
cmd can be either a string containing the command to be run, or a sequence
of strings that are the tokens of the command.
Please see Python's subprocess.Popen for a description of the shell
parameter and how cmd is interpreted differently based on its value.
This code was copied from QIIME's qiime_system_call() (util.py) function on June 3rd, 2013.
"""
proc = Popen(cmd, shell=shell, universal_newlines=True, stdout=PIPE,
stderr=PIPE)
# communicate pulls all stdout/stderr from the PIPEs to
# avoid blocking -- don't remove this line!
stdout, stderr = proc.communicate()
return_value = proc.returncode
return stdout, stderr, return_value
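# Illustrative usage (hypothetical command):
#   stdout, stderr, rc = system_call('echo hello')
#   assert rc == 0 and stdout.strip() == 'hello'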
def file_contains_nulls(path):
    """Checks given file for null characters. These are sometimes created on SGE clusters when system IO is overloaded."""
    # use a context manager so the handle is closed, and avoid shadowing
    # the `file` builtin
    with open(path, 'rb') as fh:
        return '\x00' in fh.read()
def parse_table_to_biom(table_lines, table_format="tab-delimited",\
biom_format = 'otu table'):
"""Read the lines of an open trait table file, and output a .biom table object
The trait table must be either a biom file, or a picrust tab-delimited file
table_format -- must be either 'tab-delimited' or 'biom'
"""
return parse_table(table_lines)
def get_picrust_project_dir():
""" Returns the top-level PICRUST directory
"""
# Get the full path of util.py
current_file_path = abspath(__file__)
# Get the directory containing util.py
current_dir_path = dirname(current_file_path)
# Return the directory containing the directory containing util.py
return dirname(current_dir_path)
def transpose_trait_table_fields(data_fields,header,id_row_idx=0,\
input_header_delimiter="\t",output_delimiter="\t"):
"""Transpose the fields of a trait table, returning new data_fields,header
data_fields: list of lists for data fields
header: a string describing the header_line
    id_row_idx: index of row labels.  Almost always 0,
      but included for completeness
input_header_delimiter: delimiter for fields in the header string
output_delimiter: use this delimiter to join header fields
NOTE: typically the header and data fields are generated
by parse_trait_table in picrust.parse
"""
header_fields = header.split(input_header_delimiter)
# ensure no trailing newlines
old_header_fields = [h.strip() for h in header_fields]
new_header_fields = [old_header_fields[0]] + \
[df[id_row_idx].strip() for df in data_fields]
non_label_data_fields = []
for row in data_fields:
non_label_fields = [e for i, e in enumerate(row) if i != id_row_idx]
non_label_data_fields.append(non_label_fields)
data_array = array(non_label_data_fields)
new_data_array = data_array.T
new_rows = []
for i,row in enumerate(new_data_array):
label = old_header_fields[i+1]
# this is i+1 not i because i is the blank/meaningless
# upper left corner entry.
new_row = [label] + list(row)
new_rows.append(new_row)
new_header = output_delimiter.join(new_header_fields)
return new_header + "\n", new_rows
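# Worked example (tiny 2x2 table, derived from the logic above):
#   header = "OTU_IDs\ttrait1\ttrait2"
#   data_fields = [["otu1", "1", "0"], ["otu2", "3", "2"]]
#   transpose_trait_table_fields(data_fields, header) returns
#   ("OTU_IDs\totu1\totu2\n", [["trait1", "1", "3"], ["trait2", "0", "2"]])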
def make_output_dir_for_file(filepath):
"""Create sub-directories for a new file if they don't already exist"""
dirpath = dirname(filepath)
if not isdir(dirpath) and not dirpath == '':
makedirs(dirpath)
def write_biom_table(biom_table, biom_table_fp, compress=True,
write_hdf5=HAVE_H5PY, format_fs=None):
"""Writes a BIOM table to the specified filepath
Parameters
----------
biom_table : biom.Table
The table object to write out
biom_table_fp : str
The path to the output file
compress : bool, optional
Defaults to ``True``. If True, built-in compression on the output HDF5
file will be enabled. This option is only relevant if ``write_hdf5`` is
``True``.
write_hdf5 : bool, optional
Defaults to ``True`` if H5PY is installed and to ``False`` if H5PY is
not installed. If ``True`` the output biom table will be written as an
HDF5 binary file, otherwise it will be a JSON string.
format_fs : dict, optional
Formatting functions to be passed to `Table.to_hdf5`
Notes
-----
This code was adapted from QIIME 1.9
"""
generated_by = "PICRUSt " + __version__
if write_hdf5:
with biom_open(biom_table_fp, 'w') as biom_file:
biom_table.to_hdf5(biom_file, generated_by, compress,
format_fs=format_fs)
else:
with open(biom_table_fp, 'w') as biom_file:
biom_table.to_json(generated_by, biom_file)
def make_output_dir(dirpath, strict=False):
"""Make an output directory if it doesn't exist
Returns the path to the directory
dirpath -- a string describing the path to the directory
strict -- if True, raise an exception if dir already
exists
"""
dirpath = abspath(dirpath)
#Check if directory already exists
if isdir(dirpath):
        if strict:
err_str = "Directory '%s' already exists" % dirpath
raise IOError(err_str)
return dirpath
try:
makedirs(dirpath)
    except IOError,e:
        err_str = "Could not create directory '%s'. Are permissions set correctly? Got error: '%s'" % (dirpath, e)
        raise IOError(err_str)
return dirpath
class PicrustNode(PhyloNode):
def multifurcating(self, num, eps=None, constructor=None):
"""Return a new tree with every node having num or few children
num : the number of children a node can have max
eps : default branch length to set if self or constructor is of
PhyloNode type
constructor : a TreeNode or subclass constructor. If None, uses self
"""
if num < 2:
raise TreeError, "Minimum number of children must be >= 2"
if eps is None:
eps = 0.0
if constructor is None:
constructor = self.__class__
if hasattr(constructor, 'Length'):
set_branchlength = True
else:
set_branchlength = False
new_tree = self.copy()
for n in new_tree.preorder(include_self=True):
while len(n.Children) > num:
new_node = constructor(Children=n.Children[-num:])
if set_branchlength:
new_node.Length = eps
n.append(new_node)
return new_tree
def bifurcating(self, eps=None, constructor=None):
"""Wrap multifurcating with a num of 2"""
return self.multifurcating(2, eps, constructor)
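    # Illustrative sketch: with num=2, a node whose children are (a, b, c, d)
    # is rewrapped as (a, (b, (c, d))), each new internal node getting branch
    # length eps when the constructor supports branch lengths.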
def nameUnnamedNodes(self):
"""sets the Data property of unnamed nodes to an arbitrary value
Internal nodes are often unnamed and so this function assigns a
value for referencing.
        Note*: This method is faster than pycogent nameUnnamedNodes()
because it uses a dict instead of an array. Also, we traverse
only over internal nodes (and not including tips)
"""
#make a list of the names that are already in the tree
names_in_use = {}
for node in self.iterNontips(include_self=True):
if node.Name:
names_in_use[node.Name]=1
#assign unique names to the Data property of nodes where Data = None
name_index = 1
for node in self.iterNontips(include_self=True):
#if (not node.Name) or re.match('edge',node.Name):
if not node.Name:
new_name = 'node' + str(name_index)
#choose a new name if name is already in tree
while new_name in names_in_use:
name_index += 1
new_name = 'node' + str(name_index)
node.Name = new_name
names_in_use[node.Name]=1
name_index += 1
def getSubTree(self,names):
"""return a new subtree with just the tips in names
assumes names is a set
assumes all names in names are present as tips in tree
"""
tcopy = self.deepcopy()
while len(tcopy.tips()) != len(names):
# for each tip, remove it if we do not want to keep it
for n in tcopy.tips():
if n.Name not in names:
n.Parent.removeNode(n)
# reduce single-child nodes
tcopy.prune()
return tcopy
def list_of_list_of_str_formatter(grp, header, md, compression):
"""Serialize [[str]] into a BIOM hdf5 compatible form
Parameters
----------
grp : h5py.Group
This is ignored. Provided for passthrough
header : str
The key in each dict to pull out
md : list of dict
The axis metadata
compression : bool
Whether to enable dataset compression. This is ignored, provided for
passthrough
Returns
-------
grp : h5py.Group
The h5py.Group
header : str
The key in each dict to pull out
md : list of dict
The modified metadata that can be formatted in hdf5
compression : bool
Whether to enable dataset compression.
Notes
-----
This method is intended to be a "passthrough" to BIOM's
vlen_list_of_str_formatter method. It is a transform method.
"""
new_md = [{header: atleast_1d(asarray(dumps(m[header])))} for m in md]
return (grp, header, new_md, compression)
def picrust_formatter(*args):
"""Transform, and format"""
return vlen_list_of_str_formatter(*list_of_list_of_str_formatter(*args))
|
gpl-3.0
| 4,356,464,400,882,689,000
| 33.936975
| 243
| 0.632111
| false
|
kizniche/Mycodo
|
databases/alembic/versions/af5891792291_add_new_output_options.py
|
1
|
1151
|
"""Add new output options
Revision ID: af5891792291
Revises: 561621f634cb
Create Date: 2020-07-06 18:31:35.210777
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'af5891792291'
down_revision = '561621f634cb'
branch_labels = None
depends_on = None
def upgrade():
import sys
import os
sys.path.append(os.path.abspath(os.path.join(__file__, "../../../..")))
from databases.alembic_post_utils import write_revision_post_alembic
write_revision_post_alembic(revision)
with op.batch_alter_table("function_actions") as batch_op:
batch_op.add_column(sa.Column('do_output_amount', sa.Float))
with op.batch_alter_table("pid") as batch_op:
batch_op.add_column(sa.Column('raise_output_type', sa.String))
batch_op.add_column(sa.Column('lower_output_type', sa.String))
def downgrade():
with op.batch_alter_table("function_actions") as batch_op:
batch_op.drop_column('do_output_amount')
with op.batch_alter_table("pid") as batch_op:
batch_op.drop_column('raise_output_type')
batch_op.drop_column('lower_output_type')
|
gpl-3.0
| -1,390,153,645,324,506,000
| 27.775
| 75
| 0.689835
| false
|
poppabear8883/ViperBot
|
viperpy/modules/tools/inputs.py
|
1
|
3514
|
import getpass
import re
import ipaddress
def yesNoInput(question):
valid = ['','y','Y','ye','yes','YES','n','N','no','No','NO']
data = ''
while True:
data = raw_input(question)
if not data in valid:
print 'Invalid input!'
continue
else:
break
print ' '
return data
def passwordInput(question):
data = ''
while True:
data = getpass.getpass(question)
if not data.isalnum():
print 'Must be an Alphanumeric value!'
continue
elif '' == data:
print 'Can not be empty!'
continue
else:
break
print ' '
return data
def alphaNumInput(question):
data = ''
while True:
data = raw_input(question)
if not data.isalnum():
print 'Must be an Alphanumeric value!'
continue
elif '' == data:
print 'Can not be empty!'
continue
else:
break
print ' '
return data
def numInput(question):
data = ''
while True:
data = raw_input(question)
if not data.isdigit():
print 'Must be an Number (integer/digit)'
continue
elif '' == data:
print 'Can not be empty!'
continue
else:
break
print ' '
return data
def channelInput(question):
data = ''
while True:
data = raw_input(question)
if not data.startswith('#'):
print 'Must start with #! (#'+data+')'
continue
elif '' == data:
print 'Can not be empty!'
continue
else:
break
print ' '
return data
def serversInput(question):
data = ''
while True:
data = raw_input(question)
if not re.match(r'[^:,]+:\d+(?:,[^:,]+:\d+)*$', data):
print 'Invalid Input! (server:port,server:port)'
continue
        else:
            # validate each server:port entry individually
            valid = True
            for server in data.split(','):
                s = server.split(':', 1)
                if not re.match(r'(?=^.{1,254}$)(^(?:(?!\d+\.|-)[a-zA-Z0-9_\-]{1,63}(?<!-)\.)+(?:[a-zA-Z]{2,})$)', s[0]):
                    print 'The server portion is not a FQDN ie: (irc.freenode.net)'
                    valid = False
                    break
            if valid:
                break
print ' '
return data
def portInput(question):
data = ''
while True:
data = raw_input(question)
        if not data.isdigit() or len(data) > 6 or data.startswith('0'):
print 'Must be a Number between 1-9 and no more than 6 digits!'
continue
elif '' == data:
print 'Can not be empty!'
continue
else:
break
print ' '
return data
def ipInput(question):
data = ''
while True:
data = raw_input(question)
try:
            # the ipaddress backport on Python 2 expects unicode, not str
            if ipaddress.ip_address(unicode(data)):
break
except ValueError:
print 'Not a valid IP!'
continue
print ' '
return data
def emailInput(question):
data = ''
while True:
data = raw_input(question)
if not re.match(r'(\w+[.|\w])*@(\w+[.])*\w+', data):
print 'Not a valid Email Address!'
continue
elif '' == data:
print 'Can not be empty!'
continue
else:
break
print ' '
return data
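# Illustrative prompt flow (hypothetical values):
#   nick = alphaNumInput('Bot nick: ')
#   chan = channelInput('Channel (e.g. #viperbot): ')
#   servers = serversInput('Servers (server:port,server:port): ')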
|
gpl-2.0
| -1,865,651,579,409,427,500
| 20.9625
| 121
| 0.472112
| false
|
AppEnlight/channelstream_twisted_test
|
channelstream/server.py
|
1
|
1429
|
import uuid, sys
from twisted.python import log
from twisted.internet import reactor
from twisted.web.server import Site
from twisted.web.wsgi import WSGIResource
from autobahn.twisted.resource import WebSocketResource, \
WSGIRootResource
from wsgi_app import make_app
from ws_protocol import BroadcastServerFactory, BroadcastServerProtocol
def run_server(config):
    debug = bool(config['debug'])
observer = log.PythonLoggingObserver()
observer.start()
if debug:
log.startLogging(sys.stdout)
ServerFactory = BroadcastServerFactory
factory = ServerFactory(
"ws://%s:%s" % (config['host'], config['port']),
debug=debug,
debugCodePaths=debug,
externalPort=config['external_port'])
factory.protocol = BroadcastServerProtocol
wsResource = WebSocketResource(factory)
## create a Twisted Web WSGI resource for our Pyramid server
app = make_app(config)
wsgiResource = WSGIResource(reactor, reactor.getThreadPool(), app)
## create a root resource serving everything via WSGI/, but
## the path "/ws" served by our WebSocket stuff
rootResource = WSGIRootResource(wsgiResource, {'ws': wsResource})
## create a Twisted Web Site and run everything
##
site = Site(rootResource)
reactor.listenTCP(config['port'], site, interface=config['host'])
reactor.run()
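# Illustrative only -- the config dict shape assumed by run_server above
# (make_app may expect additional keys):
#   run_server({'debug': False, 'host': '0.0.0.0', 'port': 8000,
#               'external_port': 8000})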
|
bsd-3-clause
| -2,878,703,117,169,364,500
| 30.065217
| 71
| 0.704689
| false
|
bashu/django-easy-seo
|
seo/south_migrations/0004_auto__del_url__del_unique_url_url_site.py
|
1
|
2904
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Removing unique constraint on 'Url', fields ['url', 'site']
db.delete_unique('seo_url', ['url', 'site_id'])
# Deleting model 'Url'
db.delete_table('seo_url')
def backwards(self, orm):
# Adding model 'Url'
db.create_table('seo_url', (
('url', self.gf('django.db.models.fields.CharField')(default='/', max_length=200)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('site', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['sites.Site'], null=True, blank=True)),
))
db.send_create_signal('seo', ['Url'])
# Adding unique constraint on 'Url', fields ['url', 'site']
db.create_unique('seo_url', ['url', 'site_id'])
models = {
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'seo.seo': {
'Meta': {'unique_together': "(('content_type', 'object_id'),)", 'object_name': 'Seo'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywords': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']", 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['seo']
|
gpl-3.0
| -8,277,121,684,023,901,000
| 49.964912
| 161
| 0.546488
| false
|
dparks1134/ExpressBetaDiversity
|
scripts/convertToEBD.py
|
1
|
2858
|
#!/usr/bin/env python
__author__ = 'Donovan Parks'
__copyright__ = 'Copyright 2013'
__credits__ = ['Donovan Parks']
__license__ = 'GPL3'
__version__ = '1.0.0'
__maintainer__ = 'Donovan Parks'
__email__ = 'donovan.parks@gmail.com'
__status__ = 'Development'
import argparse
def isNumber(s):
try:
float(s)
return True
except ValueError:
return False
def doWork(args):
fin = open(args.inputFile)
data = fin.readlines()
fin.close()
# determine if format is sparse or dense
bSparse = False
for line in data:
if line[0] == '#':
continue
lineSplit = line.split('\t')
if len(lineSplit) == 2 or len(lineSplit) == 3:
bSparse = True
break
# convert to EBD format
sampleOTUs = {}
otuIds = set([])
if bSparse:
print('Parsing OTU data in sparse biom-format.')
for line in data:
if line[0] == '#' or line.strip() == '':
continue
lineSplit = line.split('\t')
otuId = lineSplit[0]
sampleId = lineSplit[1]
count = 1
if len(lineSplit) == 3:
count = float(lineSplit[2])
if sampleId not in sampleOTUs:
sampleOTUs[sampleId] = {}
sampleOTUs[sampleId][otuId] = sampleOTUs[sampleId].get(otuId, 0) + count
otuIds.add(otuId)
else:
print('Parsing OTU data in dense biom-format.')
for line in data:
            if 'OTU ID' in line or 'otu id' in line:
lineSplit = line.split('\t')
if lineSplit[-1].strip() == 'Consensus Lineage':
# QIIME-formatted OTU table with taxonomic information
sampleIds = [x.strip() for x in lineSplit[1:-1]]
else:
# standard biom-formatted table
sampleIds = [x.strip() for x in lineSplit[1:]]
for sampleId in sampleIds:
sampleOTUs[sampleId] = {}
continue
if line[0] == '#' or line.strip() == '':
continue
lineSplit = line.split('\t')
otuId = lineSplit[0]
counts = [float(x) for x in lineSplit[1:len(sampleIds)+1]]
for i in range(0, len(sampleIds)):
sampleOTUs[sampleIds[i]][otuId] = counts[i]
otuIds.add(otuId)
# write EBD OTU table
fout = open(args.outputFile, 'w')
sortedOTUs = sorted(list(otuIds))
for otuId in sortedOTUs:
fout.write('\t' + otuId)
fout.write('\n')
for sampleId in sampleOTUs:
fout.write(sampleId)
for otuId in sortedOTUs:
if otuId in sampleOTUs[sampleId]:
fout.write('\t' + str(sampleOTUs[sampleId][otuId]))
else:
fout.write('\t0')
fout.write('\n')
print('EBD formatted OTU data written to: ' + args.outputFile)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Convert UniFrac environment files for use with EBD.")
parser.add_argument('inputFile', help='Input OTU table in sparse or dense UniFrac format.')
parser.add_argument('outputFile', help='Output OTU table in EBD format.')
args = parser.parse_args()
doWork(args)
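# Example invocation (hypothetical file names):
#   ./convertToEBD.py otu_table.txt otu_table.ebd.tsv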
|
gpl-3.0
| -43,951,599,076,369,620
| 24.300885
| 100
| 0.636809
| false
|
JohnsonZhao/bdyun
|
src/config/config.py
|
1
|
3126
|
# when the system starts, load all the configuration
import xml.etree.ElementTree as ET
"""
@author zhb
this class contains the information which is saved in the
configuration file.
key: developer's application key
uname: store the account user name
upass: store the account user passwd
access_token: authority to retrieve netdisk data
sync_path_data: a list of {name, localpath, remotepath}
One Important Thing:
There should only be one instance ConfigImpl object,
So I wrapped it into the Config object.
Once you import this module, you can use:
config = Config.get_config()
to get this instance.
"""
class ConfigImpl:
# parse the configuration file
def __init__(self):
self.path = "bdyun.conf"
self.doctree = ET.parse(self.path)
self.root = self.doctree.getroot()
def get_app_key(self):
return self.root.find("dev").find("key").text
def get_uname(self):
return self.root.find("user").find("name").text
def get_upass(self):
return self.root.find("user").find("pass").text
def get_access_token(self):
return self.root.find("user").find("token").text
def get_accept(self):
return self.root.find("user").find("accept").text
def get_sync_path_data(self):
items = self.root.find("sync").findall("item")
self.sync_path_data = []
for item in items:
self.sync_path_data.append({"name": item.attrib["name"],
"local": item.attrib["local"],
"remote": item.attrib["remote"]})
return self.sync_path_data
def set_uname(self, name):
self.root.find("user").find("name").text = name
self.write_back()
def set_upass(self, passwd):
self.root.find("user").find("pass").text = passwd
self.write_back()
def set_token(self, token):
self.root.find("user").find("token").text = token
self.write_back()
def set_accept(self, accept):
self.root.find("user").find("accept").text = accept
self.write_back()
    # manipulate the sync path data set
    # if the name already exists, override that item; otherwise add a new entry
    def add_sync_path(self, name, local, remote):
        for item in self.root.find("sync").findall("item"):
            if item.attrib["name"] == name:
                item.attrib["local"] = local
                item.attrib["remote"] = remote
                break
        else:
            ET.SubElement(self.root.find("sync"), "item",
                          {"name": name, "local": local, "remote": remote})
        self.write_back()
# delete an entry in this item list
def delete_sync_path(self, name):
for item in self.root.find("sync").findall("item"):
if item.attrib["name"] == name:
self.root.find("sync").remove(item)
self.write_back()
break
# write the path back to the configuration file
def write_back(self):
self.doctree.write(self.path)
# implement the single instance of ConfigImpl object
class Config:
__config = ConfigImpl()
@staticmethod
def get_config():
return Config.__config
# Another implementation
# @classmethod
# def get_config(cls):
# return cls.__config
if __name__ == "__main__":
config = Config.get_config()
array = config.get_sync_path_data()
for item in array:
print("Name:", item["name"], "Local:", item["local"],
"Remote:", item["remote"])
    # now let's point the `docs` entry at /root/Document and drop the `video` one
config.add_sync_path("docs", "/root/Document", "Document")
config.delete_sync_path("video")
|
apache-2.0
| -6,699,435,496,020,833,000
| 26.182609
| 59
| 0.6865
| false
|
luminusnetworks/flask-restplus
|
flask_restplus/model.py
|
1
|
3749
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import copy
import re
from collections import MutableMapping
from six import iteritems, itervalues
from werkzeug import cached_property
from flask.ext.restful import abort
from jsonschema import Draft4Validator
from jsonschema.exceptions import ValidationError
from .utils import not_none
RE_REQUIRED = re.compile(r'u?\'(?P<name>.*)\' is a required property', re.I | re.U)
def instance(cls):
if isinstance(cls, type):
return cls()
return cls
class ApiModel(dict, MutableMapping):
'''A thin wrapper on dict to store API doc metadata'''
def __init__(self, *args, **kwargs):
self.__apidoc__ = {}
self.__parent__ = None
super(ApiModel, self).__init__(*args, **kwargs)
@cached_property
def resolved(self):
'''
Resolve real fields before submitting them to upstream restful marshal
'''
# Duplicate fields
resolved = copy.deepcopy(self)
# Recursively copy parent fields if necessary
if self.__parent__:
resolved.update(self.__parent__.resolved)
# Handle discriminator
candidates = [f for f in itervalues(resolved) if getattr(f, 'discriminator', None)]
        # Ensure there is only one discriminator
if len(candidates) > 1:
raise ValueError('There can only be one discriminator by schema')
        # Ensure the discriminator always outputs the model name
elif len(candidates) == 1:
candidates[0].default = self.__apidoc__['name']
return resolved
@property
def ancestors(self):
'''
Return the ancestors tree
'''
return self.__parent__.tree
@cached_property
def tree(self):
'''
Return the inheritance tree
'''
tree = [self.__apidoc__['name']]
return self.ancestors + tree if self.__parent__ else tree
@property
def name(self):
return self.__apidoc__['name']
def get_parent(self, name):
if self.name == name:
return self
elif self.__parent__:
return self.__parent__.get_parent(name)
else:
raise ValueError('Parent ' + name + ' not found')
@cached_property
def __schema__(self):
properties = {}
required = set()
discriminator = None
for name, field in iteritems(self):
field = instance(field)
properties[name] = field.__schema__
if field.required:
required.add(name)
if getattr(field, 'discriminator', False):
discriminator = name
schema = not_none({
'required': sorted(list(required)) or None,
'properties': properties,
'discriminator': discriminator,
})
if self.__parent__:
return {
'allOf': [
{'$ref': '#/definitions/{0}'.format(self.__parent__.name)},
schema
]
}
else:
return schema
def validate(self, data, resolver=None):
validator = Draft4Validator(self.__schema__, resolver=resolver)
try:
validator.validate(data)
except ValidationError:
abort(400, message='Input payload validation failed',
errors=dict(self.format_error(e) for e in validator.iter_errors(data)))
def format_error(self, error):
path = list(error.path)
if error.validator == 'required':
name = RE_REQUIRED.match(error.message).group('name')
path.append(name)
key = '.'.join(str(p) for p in path)
return key, error.message
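# Illustrative sketch (hypothetical field objects; fields are assumed to expose
# `required`, `__schema__` and an optional `discriminator` attribute):
#   person = ApiModel(name=fields.String(required=True))
#   person.__apidoc__['name'] = 'Person'
#   person.__schema__ -> {'required': ['name'],
#                         'properties': {'name': {'type': 'string'}}}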
|
mit
| 9,132,711,310,193,059,000
| 28.289063
| 91
| 0.572153
| false
|
channsoden/hannsoden-bioinformatics
|
WholeGenomePhylogeny/partition_finder_tools.py
|
1
|
5531
|
# defunct partition finder functions
# (imports restored for completeness; submit, job_wait and fasta_to_phylip
# are assumed to come from the surrounding WGP package)
import os
import shutil
import sys
import subprocess as sp
def configure_PF(alignment_file, user_tree = '', branchlengths = 'linked', models='GTR+G', criteria = 'aicc', partition = '', search = 'kmeans'):
# Create a partition_finder.cfg file
cfg = open('partition_finder.cfg', 'w')
cfg.write("# ALIGNMENT FILE #\n")
cfg.write("alignment = {};\n".format(os.path.basename(alignment_file)))
if user_tree:
# Link the user tree into the working directory if necessary
treebase = os.path.basename(user_tree)
if not treebase in os.listdir('.'):
os.symlink(user_tree, treebase)
cfg.write("user_tree_topology = {};\n".format(treebase))
cfg.write("\n")
cfg.write("# BRANCHLENGTHS #\n")
cfg.write("branchlengths = {};\n".format(branchlengths))
cfg.write("\n")
cfg.write("# MODELS OF EVOLUTION #\n")
cfg.write("models = {};\n".format(models))
cfg.write("model_selection = {};\n".format(criteria))
cfg.write("\n")
cfg.write("# DATA BLOCKS #\n")
cfg.write("[data_blocks]\n")
if partition:
        exit('configure_PF(): Configuring PF with a user defined partition is not yet implemented. Only the kmeans algorithm is implemented at this point.')
else:
with open(alignment_file, 'r') as fh:
genomesize = int(fh.readline().strip().split()[1])
cfg.write("genome = 1-{};\n".format(genomesize))
cfg.write("\n")
cfg.write("# SCHEMES #\n")
cfg.write("[schemes]\n")
cfg.write("search = {};\n".format(search))
cfg.write("\n")
cfg.write("# user schemes (see manual)\n")
cfg.close()
def partition_finder(args):
# Run Partition Finder 2
# Using more than one thread does not seem to make a difference, at least on my system with the current version.
PFpath = '/global/scratch/hannsode/pkgs/partitionfinder/PartitionFinder.py'
basedir = os.getcwd()
os.chdir(args.output)
command = '{0} {1} {2} --raxml'.format(sys.executable, PFpath, os.getcwd())
partitioning = sp.Popen(command.split())
os.chdir(basedir)
return partitioning.wait()
def get_scheme(output):
# Pulls the partitioning scheme suitable for RAxML/ExaML out of the results of a PartitionFinder analysis.
with open(output+'/analysis/best_scheme.txt', 'r') as best_scheme:
subsets = [line for line in best_scheme if line.startswith('DNA, Subset')]
outfile = output+'.best_scheme.partition'
with open(outfile, 'w') as fh:
fh.writelines(subsets)
return outfile
def partition(args, alignment):
basedir = os.getcwd()
try:
os.mkdir('3_partitioning')
except OSError:
pass
os.chdir('3_partitioning')
try:
os.mkdir(args.output)
except OSError:
pass
phylip = fasta_to_phylip(alignment)
# PartitionFinder must be run from a unique subdirectory
    # because the names of its intermediate and output files
# are hardcoded. This will allow for running multiple
# instances of WGP from the same directory.
phypath = basedir+'/3_partitioning/'+phylip
link = basedir+'/3_partitioning/'+args.output+'/'+phylip
if not (os.path.islink(link) and os.path.realpath(link) == phypath):
try:
os.remove(link)
except OSError:
pass
os.symlink(phypath, link)
os.chdir(args.output)
configure_PF(phylip)
os.chdir(basedir+'/3_partitioning')
input_size = os.stat(phylip).st_size
if input_size > 300 * 10 ** 6:
ID = submit('{} {} {} 20'.format(sys.executable, __file__, basedir+'/'+args.output+'.args.pickle'),
partition = 'savio_bigmem',
account = 'co_rosalind',
qos = 'savio_lowprio',
time = '12:0:0',
job_name = 'partitionfinder',
cpus_per_task = 20,
mem_per_cpu = '25600',
modules = ['raxml/8.1.17'])
elif input_size > 50 * 10 ** 6:
ID = submit('{} {} {} 24'.format(sys.executable, __file__, basedir+'/'+args.output+'.args.pickle'),
partition = 'savio2_bigmem',
account = 'co_rosalind',
qos = 'savio_lowprio',
time = '12:0:0',
job_name = 'partitionfinder',
cpus_per_task = 24,
mem_per_cpu = '5300',
modules = ['raxml/8.1.17'])
else:
ID = submit('{} {} {} 20'.format(sys.executable, __file__, basedir+'/'+args.output+'.args.pickle'),
partition = 'savio',
account = 'co_rosalind',
qos = 'rosalind_savio_normal',
time = '12:0:0',
job_name = 'partitionfinder',
cpus_per_task = 20,
mem_per_cpu = '3000',
modules = ['raxml/8.1.17'])
job_wait(ID)
outfile = 'partitionfinder_'+str(ID)+'.out'
errfile = 'partitionfinder_'+str(ID)+'.err'
partition_file = get_scheme(args.output)
os.chdir(basedir)
cleanup(logs=[outfile, errfile], trashdir=basedir+'/3_partitioning/'+args.output)
return basedir+'/3_partitioning/'+partition_file, phypath
def cleanup(logs=[], trashdir=None):
if logs and not os.path.isdir('3_partitioning/logs'):
os.mkdir('3_partitioning/logs')
[os.rename('3_partitioning/'+log, '3_partitioning/logs/'+log) for log in logs]
if trashdir:
shutil.rmtree(trashdir)
|
gpl-3.0
| -6,557,690,750,098,337,000
| 37.950704
| 152
| 0.580004
| false
|
seewindcn/tortoisehg
|
src/mercurial/ui.py
|
1
|
45732
|
# ui.py - user interface bits for mercurial
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
import errno
import getpass
import inspect
import os
import re
import socket
import sys
import tempfile
import traceback
from .i18n import _
from .node import hex
from . import (
config,
error,
formatter,
progress,
scmutil,
util,
)
samplehgrcs = {
'user':
"""# example user config (see "hg help config" for more info)
[ui]
# name and email, e.g.
# username = Jane Doe <jdoe@example.com>
username =
[extensions]
# uncomment these lines to enable some popular extensions
# (see "hg help extensions" for more info)
#
# pager =
# progress =
# color =""",
'cloned':
"""# example repository config (see "hg help config" for more info)
[paths]
default = %s
# path aliases to other clones of this repo in URLs or filesystem paths
# (see "hg help config.paths" for more info)
#
# default-push = ssh://jdoe@example.net/hg/jdoes-fork
# my-fork = ssh://jdoe@example.net/hg/jdoes-fork
# my-clone = /home/jdoe/jdoes-clone
[ui]
# name and email (local to this repository, optional), e.g.
# username = Jane Doe <jdoe@example.com>
""",
'local':
"""# example repository config (see "hg help config" for more info)
[paths]
# path aliases to other clones of this repo in URLs or filesystem paths
# (see "hg help config.paths" for more info)
#
# default = http://example.com/hg/example-repo
# default-push = ssh://jdoe@example.net/hg/jdoes-fork
# my-fork = ssh://jdoe@example.net/hg/jdoes-fork
# my-clone = /home/jdoe/jdoes-clone
[ui]
# name and email (local to this repository, optional), e.g.
# username = Jane Doe <jdoe@example.com>
""",
'global':
"""# example system-wide hg config (see "hg help config" for more info)
[extensions]
# uncomment these lines to enable some popular extensions
# (see "hg help extensions" for more info)
#
# blackbox =
# progress =
# color =
# pager =""",
}
class ui(object):
def __init__(self, src=None):
# _buffers: used for temporary capture of output
self._buffers = []
# 3-tuple describing how each buffer in the stack behaves.
# Values are (capture stderr, capture subprocesses, apply labels).
self._bufferstates = []
# When a buffer is active, defines whether we are expanding labels.
# This exists to prevent an extra list lookup.
self._bufferapplylabels = None
self.quiet = self.verbose = self.debugflag = self.tracebackflag = False
self._reportuntrusted = True
self._ocfg = config.config() # overlay
self._tcfg = config.config() # trusted
self._ucfg = config.config() # untrusted
self._trustusers = set()
self._trustgroups = set()
self.callhooks = True
if src:
self.fout = src.fout
self.ferr = src.ferr
self.fin = src.fin
self._tcfg = src._tcfg.copy()
self._ucfg = src._ucfg.copy()
self._ocfg = src._ocfg.copy()
self._trustusers = src._trustusers.copy()
self._trustgroups = src._trustgroups.copy()
self.environ = src.environ
self.callhooks = src.callhooks
self.fixconfig()
else:
self.fout = sys.stdout
self.ferr = sys.stderr
self.fin = sys.stdin
# shared read-only environment
self.environ = os.environ
# we always trust global config files
for f in scmutil.rcpath():
self.readconfig(f, trust=True)
def copy(self):
return self.__class__(self)
def formatter(self, topic, opts):
return formatter.formatter(self, topic, opts)
def _trusted(self, fp, f):
st = util.fstat(fp)
if util.isowner(st):
return True
tusers, tgroups = self._trustusers, self._trustgroups
if '*' in tusers or '*' in tgroups:
return True
user = util.username(st.st_uid)
group = util.groupname(st.st_gid)
if user in tusers or group in tgroups or user == util.username():
return True
if self._reportuntrusted:
self.warn(_('not trusting file %s from untrusted '
'user %s, group %s\n') % (f, user, group))
return False
def readconfig(self, filename, root=None, trust=False,
sections=None, remap=None):
try:
fp = open(filename)
except IOError:
if not sections: # ignore unless we were looking for something
return
raise
cfg = config.config()
trusted = sections or trust or self._trusted(fp, filename)
try:
cfg.read(filename, fp, sections=sections, remap=remap)
fp.close()
except error.ConfigError as inst:
if trusted:
raise
self.warn(_("ignored: %s\n") % str(inst))
if self.plain():
for k in ('debug', 'fallbackencoding', 'quiet', 'slash',
'logtemplate', 'statuscopies', 'style',
'traceback', 'verbose'):
if k in cfg['ui']:
del cfg['ui'][k]
for k, v in cfg.items('defaults'):
del cfg['defaults'][k]
# Don't remove aliases from the configuration if in the exceptionlist
if self.plain('alias'):
for k, v in cfg.items('alias'):
del cfg['alias'][k]
if self.plain('revsetalias'):
for k, v in cfg.items('revsetalias'):
del cfg['revsetalias'][k]
if trusted:
self._tcfg.update(cfg)
self._tcfg.update(self._ocfg)
self._ucfg.update(cfg)
self._ucfg.update(self._ocfg)
if root is None:
root = os.path.expanduser('~')
self.fixconfig(root=root)
def fixconfig(self, root=None, section=None):
if section in (None, 'paths'):
# expand vars and ~
# translate paths relative to root (or home) into absolute paths
root = root or os.getcwd()
for c in self._tcfg, self._ucfg, self._ocfg:
for n, p in c.items('paths'):
if not p:
continue
if '%%' in p:
self.warn(_("(deprecated '%%' in path %s=%s from %s)\n")
% (n, p, self.configsource('paths', n)))
p = p.replace('%%', '%')
p = util.expandpath(p)
if not util.hasscheme(p) and not os.path.isabs(p):
p = os.path.normpath(os.path.join(root, p))
c.set("paths", n, p)
if section in (None, 'ui'):
# update ui options
self.debugflag = self.configbool('ui', 'debug')
self.verbose = self.debugflag or self.configbool('ui', 'verbose')
self.quiet = not self.debugflag and self.configbool('ui', 'quiet')
if self.verbose and self.quiet:
self.quiet = self.verbose = False
self._reportuntrusted = self.debugflag or self.configbool("ui",
"report_untrusted", True)
self.tracebackflag = self.configbool('ui', 'traceback', False)
if section in (None, 'trusted'):
# update trust information
self._trustusers.update(self.configlist('trusted', 'users'))
self._trustgroups.update(self.configlist('trusted', 'groups'))
def backupconfig(self, section, item):
return (self._ocfg.backup(section, item),
self._tcfg.backup(section, item),
self._ucfg.backup(section, item),)
def restoreconfig(self, data):
self._ocfg.restore(data[0])
self._tcfg.restore(data[1])
self._ucfg.restore(data[2])
def setconfig(self, section, name, value, source=''):
for cfg in (self._ocfg, self._tcfg, self._ucfg):
cfg.set(section, name, value, source)
self.fixconfig(section=section)
def _data(self, untrusted):
return untrusted and self._ucfg or self._tcfg
def configsource(self, section, name, untrusted=False):
return self._data(untrusted).source(section, name) or 'none'
def config(self, section, name, default=None, untrusted=False):
if isinstance(name, list):
alternates = name
else:
alternates = [name]
for n in alternates:
value = self._data(untrusted).get(section, n, None)
if value is not None:
name = n
break
else:
value = default
if self.debugflag and not untrusted and self._reportuntrusted:
for n in alternates:
uvalue = self._ucfg.get(section, n)
if uvalue is not None and uvalue != value:
self.debug("ignoring untrusted configuration option "
"%s.%s = %s\n" % (section, n, uvalue))
return value
def configsuboptions(self, section, name, default=None, untrusted=False):
"""Get a config option and all sub-options.
Some config options have sub-options that are declared with the
format "key:opt = value". This method is used to return the main
option and all its declared sub-options.
        Returns a 2-tuple of ``(option, sub-options)``, where ``sub-options``
is a dict of defined sub-options where keys and values are strings.
"""
data = self._data(untrusted)
main = data.get(section, name, default)
if self.debugflag and not untrusted and self._reportuntrusted:
uvalue = self._ucfg.get(section, name)
if uvalue is not None and uvalue != main:
self.debug('ignoring untrusted configuration option '
'%s.%s = %s\n' % (section, name, uvalue))
sub = {}
prefix = '%s:' % name
for k, v in data.items(section):
if k.startswith(prefix):
sub[k[len(prefix):]] = v
if self.debugflag and not untrusted and self._reportuntrusted:
for k, v in sub.items():
uvalue = self._ucfg.get(section, '%s:%s' % (name, k))
if uvalue is not None and uvalue != v:
self.debug('ignoring untrusted configuration option '
'%s:%s.%s = %s\n' % (section, name, k, uvalue))
return main, sub
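    # For example, given an hgrc containing:
    #   [paths]
    #   default = http://example.com/repo
    #   default:pushurl = ssh://example.com/repo
    # configsuboptions('paths', 'default') returns
    # ('http://example.com/repo', {'pushurl': 'ssh://example.com/repo'})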
def configpath(self, section, name, default=None, untrusted=False):
'get a path config item, expanded relative to repo root or config file'
v = self.config(section, name, default, untrusted)
if v is None:
return None
if not os.path.isabs(v) or "://" not in v:
src = self.configsource(section, name, untrusted)
if ':' in src:
base = os.path.dirname(src.rsplit(':')[0])
v = os.path.join(base, os.path.expanduser(v))
return v
def configbool(self, section, name, default=False, untrusted=False):
"""parse a configuration element as a boolean
>>> u = ui(); s = 'foo'
>>> u.setconfig(s, 'true', 'yes')
>>> u.configbool(s, 'true')
True
>>> u.setconfig(s, 'false', 'no')
>>> u.configbool(s, 'false')
False
>>> u.configbool(s, 'unknown')
False
>>> u.configbool(s, 'unknown', True)
True
>>> u.setconfig(s, 'invalid', 'somevalue')
>>> u.configbool(s, 'invalid')
Traceback (most recent call last):
...
ConfigError: foo.invalid is not a boolean ('somevalue')
"""
v = self.config(section, name, None, untrusted)
if v is None:
return default
if isinstance(v, bool):
return v
b = util.parsebool(v)
if b is None:
raise error.ConfigError(_("%s.%s is not a boolean ('%s')")
% (section, name, v))
return b
def configint(self, section, name, default=None, untrusted=False):
"""parse a configuration element as an integer
>>> u = ui(); s = 'foo'
>>> u.setconfig(s, 'int1', '42')
>>> u.configint(s, 'int1')
42
>>> u.setconfig(s, 'int2', '-42')
>>> u.configint(s, 'int2')
-42
>>> u.configint(s, 'unknown', 7)
7
>>> u.setconfig(s, 'invalid', 'somevalue')
>>> u.configint(s, 'invalid')
Traceback (most recent call last):
...
ConfigError: foo.invalid is not an integer ('somevalue')
"""
v = self.config(section, name, None, untrusted)
if v is None:
return default
try:
return int(v)
except ValueError:
raise error.ConfigError(_("%s.%s is not an integer ('%s')")
% (section, name, v))
def configbytes(self, section, name, default=0, untrusted=False):
"""parse a configuration element as a quantity in bytes
Units can be specified as b (bytes), k or kb (kilobytes), m or
mb (megabytes), g or gb (gigabytes).
>>> u = ui(); s = 'foo'
>>> u.setconfig(s, 'val1', '42')
>>> u.configbytes(s, 'val1')
42
>>> u.setconfig(s, 'val2', '42.5 kb')
>>> u.configbytes(s, 'val2')
43520
>>> u.configbytes(s, 'unknown', '7 MB')
7340032
>>> u.setconfig(s, 'invalid', 'somevalue')
>>> u.configbytes(s, 'invalid')
Traceback (most recent call last):
...
ConfigError: foo.invalid is not a byte quantity ('somevalue')
"""
value = self.config(section, name)
if value is None:
if not isinstance(default, str):
return default
value = default
try:
return util.sizetoint(value)
except error.ParseError:
raise error.ConfigError(_("%s.%s is not a byte quantity ('%s')")
% (section, name, value))
def configlist(self, section, name, default=None, untrusted=False):
"""parse a configuration element as a list of comma/space separated
strings
>>> u = ui(); s = 'foo'
>>> u.setconfig(s, 'list1', 'this,is "a small" ,test')
>>> u.configlist(s, 'list1')
['this', 'is', 'a small', 'test']
"""
def _parse_plain(parts, s, offset):
whitespace = False
while offset < len(s) and (s[offset].isspace() or s[offset] == ','):
whitespace = True
offset += 1
if offset >= len(s):
return None, parts, offset
if whitespace:
parts.append('')
if s[offset] == '"' and not parts[-1]:
return _parse_quote, parts, offset + 1
elif s[offset] == '"' and parts[-1][-1] == '\\':
parts[-1] = parts[-1][:-1] + s[offset]
return _parse_plain, parts, offset + 1
parts[-1] += s[offset]
return _parse_plain, parts, offset + 1
def _parse_quote(parts, s, offset):
if offset < len(s) and s[offset] == '"': # ""
parts.append('')
offset += 1
while offset < len(s) and (s[offset].isspace() or
s[offset] == ','):
offset += 1
return _parse_plain, parts, offset
while offset < len(s) and s[offset] != '"':
if (s[offset] == '\\' and offset + 1 < len(s)
and s[offset + 1] == '"'):
offset += 1
parts[-1] += '"'
else:
parts[-1] += s[offset]
offset += 1
if offset >= len(s):
real_parts = _configlist(parts[-1])
if not real_parts:
parts[-1] = '"'
else:
real_parts[0] = '"' + real_parts[0]
parts = parts[:-1]
parts.extend(real_parts)
return None, parts, offset
offset += 1
while offset < len(s) and s[offset] in [' ', ',']:
offset += 1
if offset < len(s):
if offset + 1 == len(s) and s[offset] == '"':
parts[-1] += '"'
offset += 1
else:
parts.append('')
else:
return None, parts, offset
return _parse_plain, parts, offset
def _configlist(s):
s = s.rstrip(' ,')
if not s:
return []
parser, parts, offset = _parse_plain, [''], 0
while parser:
parser, parts, offset = parser(parts, s, offset)
return parts
result = self.config(section, name, untrusted=untrusted)
if result is None:
result = default or []
if isinstance(result, basestring):
result = _configlist(result.lstrip(' ,\n'))
if result is None:
result = default or []
return result
def hasconfig(self, section, name, untrusted=False):
return self._data(untrusted).hasitem(section, name)
def has_section(self, section, untrusted=False):
'''tell whether section exists in config.'''
return section in self._data(untrusted)
def configitems(self, section, untrusted=False, ignoresub=False):
items = self._data(untrusted).items(section)
if ignoresub:
newitems = {}
for k, v in items:
if ':' not in k:
newitems[k] = v
items = newitems.items()
if self.debugflag and not untrusted and self._reportuntrusted:
for k, v in self._ucfg.items(section):
if self._tcfg.get(section, k) != v:
self.debug("ignoring untrusted configuration option "
"%s.%s = %s\n" % (section, k, v))
return items
def walkconfig(self, untrusted=False):
cfg = self._data(untrusted)
for section in cfg.sections():
for name, value in self.configitems(section, untrusted):
yield section, name, value
def plain(self, feature=None):
'''is plain mode active?
Plain mode means that all configuration variables which affect
the behavior and output of Mercurial should be
ignored. Additionally, the output should be stable,
reproducible and suitable for use in scripts or applications.
The only way to trigger plain mode is by setting either the
`HGPLAIN' or `HGPLAINEXCEPT' environment variables.
The return value can either be
- False if HGPLAIN is not set, or feature is in HGPLAINEXCEPT
- True otherwise
'''
if 'HGPLAIN' not in os.environ and 'HGPLAINEXCEPT' not in os.environ:
return False
exceptions = os.environ.get('HGPLAINEXCEPT', '').strip().split(',')
if feature and exceptions:
return feature not in exceptions
return True
def username(self):
"""Return default username to be used in commits.
Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL
and stop searching if one of these is set.
If not found and ui.askusername is True, ask the user, else use
($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname".
"""
user = os.environ.get("HGUSER")
if user is None:
user = self.config("ui", ["username", "user"])
if user is not None:
user = os.path.expandvars(user)
if user is None:
user = os.environ.get("EMAIL")
if user is None and self.configbool("ui", "askusername"):
user = self.prompt(_("enter a commit username:"), default=None)
if user is None and not self.interactive():
try:
user = '%s@%s' % (util.getuser(), socket.getfqdn())
self.warn(_("no username found, using '%s' instead\n") % user)
except KeyError:
pass
if not user:
raise error.Abort(_('no username supplied'),
hint=_('use "hg config --edit" '
'to set your username'))
if "\n" in user:
raise error.Abort(_("username %s contains a newline\n")
% repr(user))
return user
def shortuser(self, user):
"""Return a short representation of a user name or email address."""
if not self.verbose:
user = util.shortuser(user)
return user
def expandpath(self, loc, default=None):
"""Return repository location relative to cwd or from [paths]"""
try:
p = self.paths.getpath(loc)
if p:
return p.rawloc
except error.RepoError:
pass
if default:
try:
p = self.paths.getpath(default)
if p:
return p.rawloc
except error.RepoError:
pass
return loc
@util.propertycache
def paths(self):
return paths(self)
def pushbuffer(self, error=False, subproc=False, labeled=False):
"""install a buffer to capture standard output of the ui object
If error is True, the error output will be captured too.
If subproc is True, output from subprocesses (typically hooks) will be
captured too.
If labeled is True, any labels associated with buffered
output will be handled. By default, this has no effect
on the output returned, but extensions and GUI tools may
        handle this argument and return styled output. If output
is being buffered so it can be captured and parsed or
processed, labeled should not be set to True.
"""
self._buffers.append([])
self._bufferstates.append((error, subproc, labeled))
self._bufferapplylabels = labeled
def popbuffer(self):
'''pop the last buffer and return the buffered output'''
self._bufferstates.pop()
if self._bufferstates:
self._bufferapplylabels = self._bufferstates[-1][2]
else:
self._bufferapplylabels = None
return "".join(self._buffers.pop())
def write(self, *args, **opts):
'''write args to output
By default, this method simply writes to the buffer or stdout,
but extensions or GUI tools may override this method,
write_err(), popbuffer(), and label() to style output from
various parts of hg.
An optional keyword argument, "label", can be passed in.
This should be a string containing label names separated by
space. Label names take the form of "topic.type". For example,
ui.debug() issues a label of "ui.debug".
When labeling output for a specific command, a label of
"cmdname.type" is recommended. For example, status issues
a label of "status.modified" for modified files.
'''
if self._buffers:
self._buffers[-1].extend(a for a in args)
else:
self._progclear()
for a in args:
self.fout.write(a)
def write_err(self, *args, **opts):
self._progclear()
try:
if self._bufferstates and self._bufferstates[-1][0]:
return self.write(*args, **opts)
if not getattr(self.fout, 'closed', False):
self.fout.flush()
for a in args:
self.ferr.write(a)
# stderr may be buffered under win32 when redirected to files,
# including stdout.
if not getattr(self.ferr, 'closed', False):
self.ferr.flush()
except IOError as inst:
if inst.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
raise
def flush(self):
try: self.fout.flush()
except (IOError, ValueError): pass
try: self.ferr.flush()
except (IOError, ValueError): pass
def _isatty(self, fh):
if self.configbool('ui', 'nontty', False):
return False
return util.isatty(fh)
def interactive(self):
'''is interactive input allowed?
An interactive session is a session where input can be reasonably read
from `sys.stdin'. If this function returns false, any attempt to read
from stdin should fail with an error, unless a sensible default has been
specified.
Interactiveness is triggered by the value of the `ui.interactive'
configuration variable or - if it is unset - when `sys.stdin' points
to a terminal device.
This function refers to input only; for output, see `ui.formatted()'.
'''
i = self.configbool("ui", "interactive", None)
if i is None:
# some environments replace stdin without implementing isatty
# usually those are non-interactive
return self._isatty(self.fin)
return i
def termwidth(self):
'''how wide is the terminal in columns?
'''
if 'COLUMNS' in os.environ:
try:
return int(os.environ['COLUMNS'])
except ValueError:
pass
return util.termwidth()
def formatted(self):
'''should formatted output be used?
        It is often desirable to format the output to suit the output medium.
        Examples of this are truncating long lines or colorizing messages.
        However, this is often not desirable when piping output into other
        utilities, e.g. `grep'.
Formatted output is triggered by the value of the `ui.formatted'
configuration variable or - if it is unset - when `sys.stdout' points
to a terminal device. Please note that `ui.formatted' should be
considered an implementation detail; it is not intended for use outside
Mercurial or its extensions.
This function refers to output only; for input, see `ui.interactive()'.
This function always returns false when in plain mode, see `ui.plain()'.
'''
if self.plain():
return False
i = self.configbool("ui", "formatted", None)
if i is None:
# some environments replace stdout without implementing isatty
# usually those are non-interactive
return self._isatty(self.fout)
return i
def _readline(self, prompt=''):
if self._isatty(self.fin):
try:
# magically add command line editing support, where
# available
import readline
# force demandimport to really load the module
readline.read_history_file
# windows sometimes raises something other than ImportError
except Exception:
pass
# call write() so output goes through subclassed implementation
# e.g. color extension on Windows
self.write(prompt)
# instead of trying to emulate raw_input, swap (self.fin,
# self.fout) with (sys.stdin, sys.stdout)
oldin = sys.stdin
oldout = sys.stdout
sys.stdin = self.fin
sys.stdout = self.fout
# prompt ' ' must exist; otherwise readline may delete entire line
# - http://bugs.python.org/issue12833
line = raw_input(' ')
sys.stdin = oldin
sys.stdout = oldout
# When stdin is in binary mode on Windows, it can cause
# raw_input() to emit an extra trailing carriage return
if os.linesep == '\r\n' and line and line[-1] == '\r':
line = line[:-1]
return line
def prompt(self, msg, default="y"):
"""Prompt user with msg, read response.
If ui is not interactive, the default is returned.
"""
if not self.interactive():
self.write(msg, ' ', default or '', "\n")
return default
try:
r = self._readline(self.label(msg, 'ui.prompt'))
if not r:
r = default
if self.configbool('ui', 'promptecho'):
self.write(r, "\n")
return r
except EOFError:
raise error.ResponseExpected()
@staticmethod
def extractchoices(prompt):
"""Extract prompt message and list of choices from specified prompt.
This returns tuple "(message, choices)", and "choices" is the
list of tuple "(response character, text without &)".
>>> ui.extractchoices("awake? $$ &Yes $$ &No")
('awake? ', [('y', 'Yes'), ('n', 'No')])
>>> ui.extractchoices("line\\nbreak? $$ &Yes $$ &No")
('line\\nbreak? ', [('y', 'Yes'), ('n', 'No')])
>>> ui.extractchoices("want lots of $$money$$?$$Ye&s$$N&o")
('want lots of $$money$$?', [('s', 'Yes'), ('o', 'No')])
"""
# Sadly, the prompt string may have been built with a filename
# containing "$$" so let's try to find the first valid-looking
# prompt to start parsing. Sadly, we also can't rely on
# choices containing spaces, ASCII, or basically anything
# except an ampersand followed by a character.
m = re.match(r'(?s)(.+?)\$\$([^\$]*&[^ \$].*)', prompt)
msg = m.group(1)
choices = [p.strip(' ') for p in m.group(2).split('$$')]
return (msg,
[(s[s.index('&') + 1].lower(), s.replace('&', '', 1))
for s in choices])
def promptchoice(self, prompt, default=0):
"""Prompt user with a message, read response, and ensure it matches
one of the provided choices. The prompt is formatted as follows:
"would you like fries with that (Yn)? $$ &Yes $$ &No"
The index of the choice is returned. Responses are case
insensitive. If ui is not interactive, the default is
returned.
"""
msg, choices = self.extractchoices(prompt)
resps = [r for r, t in choices]
while True:
r = self.prompt(msg, resps[default])
if r.lower() in resps:
return resps.index(r.lower())
self.write(_("unrecognized response\n"))
def getpass(self, prompt=None, default=None):
if not self.interactive():
return default
try:
self.write_err(self.label(prompt or _('password: '), 'ui.prompt'))
# disable getpass() only if explicitly specified. it's still valid
# to interact with tty even if fin is not a tty.
if self.configbool('ui', 'nontty'):
return self.fin.readline().rstrip('\n')
else:
return getpass.getpass('')
except EOFError:
raise error.ResponseExpected()
def status(self, *msg, **opts):
'''write status message to output (if ui.quiet is False)
This adds an output label of "ui.status".
'''
if not self.quiet:
opts['label'] = opts.get('label', '') + ' ui.status'
self.write(*msg, **opts)
def warn(self, *msg, **opts):
'''write warning message to output (stderr)
This adds an output label of "ui.warning".
'''
opts['label'] = opts.get('label', '') + ' ui.warning'
self.write_err(*msg, **opts)
def note(self, *msg, **opts):
'''write note to output (if ui.verbose is True)
This adds an output label of "ui.note".
'''
if self.verbose:
opts['label'] = opts.get('label', '') + ' ui.note'
self.write(*msg, **opts)
def debug(self, *msg, **opts):
'''write debug message to output (if ui.debugflag is True)
This adds an output label of "ui.debug".
'''
if self.debugflag:
opts['label'] = opts.get('label', '') + ' ui.debug'
self.write(*msg, **opts)
def edit(self, text, user, extra=None, editform=None, pending=None):
extra_defaults = { 'prefix': 'editor' }
if extra is not None:
extra_defaults.update(extra)
extra = extra_defaults
(fd, name) = tempfile.mkstemp(prefix='hg-' + extra['prefix'] + '-',
suffix=".txt", text=True)
try:
f = os.fdopen(fd, "w")
f.write(text)
f.close()
environ = {'HGUSER': user}
if 'transplant_source' in extra:
environ.update({'HGREVISION': hex(extra['transplant_source'])})
for label in ('intermediate-source', 'source', 'rebase_source'):
if label in extra:
environ.update({'HGREVISION': extra[label]})
break
if editform:
environ.update({'HGEDITFORM': editform})
if pending:
environ.update({'HG_PENDING': pending})
editor = self.geteditor()
self.system("%s \"%s\"" % (editor, name),
environ=environ,
onerr=error.Abort, errprefix=_("edit failed"))
f = open(name)
t = f.read()
f.close()
finally:
os.unlink(name)
return t
def system(self, cmd, environ=None, cwd=None, onerr=None, errprefix=None):
'''execute shell command with appropriate output stream. command
output will be redirected if fout is not stdout.
'''
out = self.fout
if any(s[1] for s in self._bufferstates):
out = self
return util.system(cmd, environ=environ, cwd=cwd, onerr=onerr,
errprefix=errprefix, out=out)
def traceback(self, exc=None, force=False):
        '''print exception traceback if traceback printing is enabled or
        forced. Only call this from an exception handler. Returns True if a
        traceback was printed.'''
if self.tracebackflag or force:
if exc is None:
exc = sys.exc_info()
cause = getattr(exc[1], 'cause', None)
if cause is not None:
causetb = traceback.format_tb(cause[2])
exctb = traceback.format_tb(exc[2])
exconly = traceback.format_exception_only(cause[0], cause[1])
# exclude frame where 'exc' was chained and rethrown from exctb
self.write_err('Traceback (most recent call last):\n',
''.join(exctb[:-1]),
''.join(causetb),
''.join(exconly))
else:
output = traceback.format_exception(exc[0], exc[1], exc[2])
self.write_err(''.join(output))
return self.tracebackflag or force
def geteditor(self):
'''return editor to use'''
if sys.platform == 'plan9':
# vi is the MIPS instruction simulator on Plan 9. We
# instead default to E to plumb commit messages to
# avoid confusion.
editor = 'E'
else:
editor = 'vi'
return (os.environ.get("HGEDITOR") or
self.config("ui", "editor") or
os.environ.get("VISUAL") or
os.environ.get("EDITOR", editor))
@util.propertycache
def _progbar(self):
"""setup the progbar singleton to the ui object"""
if (self.quiet or self.debugflag
or self.configbool('progress', 'disable', False)
or not progress.shouldprint(self)):
return None
return getprogbar(self)
def _progclear(self):
"""clear progress bar output if any. use it before any output"""
if '_progbar' not in vars(self): # nothing loaded yet
return
if self._progbar is not None and self._progbar.printed:
self._progbar.clear()
def progress(self, topic, pos, item="", unit="", total=None):
'''show a progress message
With stock hg, this is simply a debug message that is hidden
by default, but with extensions or GUI tools it may be
visible. 'topic' is the current operation, 'item' is a
non-numeric marker of the current position (i.e. the currently
in-process file), 'pos' is the current numeric position (i.e.
revision, bytes, etc.), unit is a corresponding unit label,
and total is the highest expected pos.
Multiple nested topics may be active at a time.
All topics should be marked closed by setting pos to None at
termination.
'''
if self._progbar is not None:
self._progbar.progress(topic, pos, item=item, unit=unit,
total=total)
if pos is None or not self.configbool('progress', 'debug'):
return
if unit:
unit = ' ' + unit
if item:
item = ' ' + item
if total:
pct = 100.0 * pos / total
self.debug('%s:%s %s/%s%s (%4.2f%%)\n'
% (topic, item, pos, total, unit, pct))
else:
self.debug('%s:%s %s%s\n' % (topic, item, pos, unit))
def log(self, service, *msg, **opts):
'''hook for logging facility extensions
service should be a readily-identifiable subsystem, which will
allow filtering.
*msg should be a newline-terminated format string to log, and
then any values to %-format into that format string.
**opts currently has no defined meanings.
'''
def label(self, msg, label):
'''style msg based on supplied label
Like ui.write(), this just returns msg unchanged, but extensions
and GUI tools can override it to allow styling output without
writing it.
ui.write(s, 'label') is equivalent to
ui.write(ui.label(s, 'label')).
'''
return msg
def develwarn(self, msg, stacklevel=1):
"""issue a developer warning message
Use 'stacklevel' to report the offender some layers further up in the
stack.
"""
msg = 'devel-warn: ' + msg
stacklevel += 1 # get in develwarn
if self.tracebackflag:
util.debugstacktrace(msg, stacklevel, self.ferr, self.fout)
else:
curframe = inspect.currentframe()
calframe = inspect.getouterframes(curframe, 2)
self.write_err('%s at: %s:%s (%s)\n'
% ((msg,) + calframe[stacklevel][1:4]))
def deprecwarn(self, msg, version):
"""issue a deprecation warning
- msg: message explaining what is deprecated and how to upgrade,
- version: last version where the API will be supported,
"""
msg += ("\n(compatibility will be dropped after Mercurial-%s,"
" update your code.)") % version
self.develwarn(msg, stacklevel=2)
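# --- Editor's illustrative sketch (not part of the original module) ---
# A minimal, hedged example of two self-contained parts of the ui API
# documented above: output buffering and prompt-choice parsing.  `myui`
# is assumed to be a fully constructed ui instance.
def _example_ui_usage(myui):
    myui.pushbuffer()                    # capture instead of writing to stdout
    myui.status('working...\n')          # buffered (suppressed if ui.quiet)
    captured = myui.popbuffer()          # -> 'working...\n'
    # parse a "$$"-delimited prompt string as described in extractchoices()
    msg, choices = ui.extractchoices('apply change? $$ &Yes $$ &No')
    # msg == 'apply change? ', choices == [('y', 'Yes'), ('n', 'No')]
    return captured, msg, choices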
class paths(dict):
"""Represents a collection of paths and their configs.
Data is initially derived from ui instances and the config files they have
loaded.
"""
def __init__(self, ui):
dict.__init__(self)
for name, loc in ui.configitems('paths', ignoresub=True):
# No location is the same as not existing.
if not loc:
continue
loc, sub = ui.configsuboptions('paths', name)
self[name] = path(ui, name, rawloc=loc, suboptions=sub)
def getpath(self, name, default=None):
"""Return a ``path`` from a string, falling back to default.
        ``name`` can be a named path or a location. Locations are filesystem
paths or URIs.
Returns None if ``name`` is not a registered path, a URI, or a local
path to a repo.
"""
# Only fall back to default if no path was requested.
if name is None:
if not default:
default = ()
elif not isinstance(default, (tuple, list)):
default = (default,)
for k in default:
try:
return self[k]
except KeyError:
continue
return None
# Most likely empty string.
# This may need to raise in the future.
if not name:
return None
try:
return self[name]
except KeyError:
# Try to resolve as a local path or URI.
try:
# We don't pass sub-options in, so no need to pass ui instance.
return path(None, None, rawloc=name)
except ValueError:
raise error.RepoError(_('repository %s does not exist') %
name)
_pathsuboptions = {}
def pathsuboption(option, attr):
"""Decorator used to declare a path sub-option.
Arguments are the sub-option name and the attribute it should set on
``path`` instances.
The decorated function will receive as arguments a ``ui`` instance,
``path`` instance, and the string value of this option from the config.
The function should return the value that will be set on the ``path``
instance.
This decorator can be used to perform additional verification of
sub-options and to change the type of sub-options.
"""
def register(func):
_pathsuboptions[option] = (attr, func)
return func
return register
@pathsuboption('pushurl', 'pushloc')
def pushurlpathoption(ui, path, value):
u = util.url(value)
# Actually require a URL.
if not u.scheme:
ui.warn(_('(paths.%s:pushurl not a URL; ignoring)\n') % path.name)
return None
# Don't support the #foo syntax in the push URL to declare branch to
# push.
if u.fragment:
ui.warn(_('("#fragment" in paths.%s:pushurl not supported; '
'ignoring)\n') % path.name)
u.fragment = None
return str(u)
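# --- Editor's illustrative sketch (not part of the original module) ---
# How another sub-option could be registered with the pathsuboption
# decorator above; the 'note' option and attribute are hypothetical and
# shown commented out so as not to alter the real _pathsuboptions registry.
#
# @pathsuboption('note', 'note')
# def notepathoption(ui, path, value):
#     return value    # stored as path.note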
class path(object):
"""Represents an individual path and its configuration."""
def __init__(self, ui, name, rawloc=None, suboptions=None):
"""Construct a path from its config options.
``ui`` is the ``ui`` instance the path is coming from.
``name`` is the symbolic name of the path.
``rawloc`` is the raw location, as defined in the config.
        ``suboptions`` is a dict of sub-options (e.g. ``pushurl``) attached
        to this path, as parsed from the config.
If ``name`` is not defined, we require that the location be a) a local
filesystem path with a .hg directory or b) a URL. If not,
``ValueError`` is raised.
"""
if not rawloc:
raise ValueError('rawloc must be defined')
# Locations may define branches via syntax <base>#<branch>.
u = util.url(rawloc)
branch = None
if u.fragment:
branch = u.fragment
u.fragment = None
self.url = u
self.branch = branch
self.name = name
self.rawloc = rawloc
self.loc = str(u)
# When given a raw location but not a symbolic name, validate the
# location is valid.
if not name and not u.scheme and not self._isvalidlocalpath(self.loc):
raise ValueError('location is not a URL or path to a local '
'repo: %s' % rawloc)
suboptions = suboptions or {}
# Now process the sub-options. If a sub-option is registered, its
# attribute will always be present. The value will be None if there
# was no valid sub-option.
for suboption, (attr, func) in _pathsuboptions.iteritems():
if suboption not in suboptions:
setattr(self, attr, None)
continue
value = func(ui, self, suboptions[suboption])
setattr(self, attr, value)
def _isvalidlocalpath(self, path):
"""Returns True if the given path is a potentially valid repository.
This is its own function so that extensions can change the definition of
'valid' in this case (like when pulling from a git repo into a hg
one)."""
return os.path.isdir(os.path.join(path, '.hg'))
@property
def suboptions(self):
"""Return sub-options and their values for this path.
This is intended to be used for presentation purposes.
"""
d = {}
for subopt, (attr, _func) in _pathsuboptions.iteritems():
value = getattr(self, attr)
if value is not None:
d[subopt] = value
return d
# we instantiate one globally shared progress bar to avoid
# competing progress bars when multiple UI objects get created
_progresssingleton = None
def getprogbar(ui):
global _progresssingleton
if _progresssingleton is None:
# passing 'ui' object to the singleton is fishy,
# this is how the extension used to work but feel free to rework it.
_progresssingleton = progress.progbar(ui)
return _progresssingleton
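# --- Editor's illustrative sketch (not part of the original module) ---
# Hedged example of the path class above resolving a location with a
# '#branch' fragment; the URL is arbitrary.  With no symbolic name,
# rawloc must be a URL or a local repo path, so this avoids the
# ValueError branch.
def _example_path_resolution():
    p = path(None, None, rawloc='https://example.com/repo#stable')
    # the fragment is split off as the branch
    return p.loc, p.branch    # ('https://example.com/repo', 'stable')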
|
gpl-2.0
| -7,963,893,888,895,642,000
| 35.209026
| 80
| 0.551233
| false
|
F5Networks/f5-common-python
|
f5/bigip/tm/asm/test/unit/test_policy_template.py
|
1
|
1931
|
# Copyright 2017 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import mock
import pytest
from f5.bigip import ManagementRoot
from f5.bigip.tm.asm.policy_templates import Policy_Template
from f5.sdk_exception import UnsupportedOperation
from six import iterkeys
@pytest.fixture
def FakePolicyTemplate():
fake_asm = mock.MagicMock()
fake_tmpl = Policy_Template(fake_asm)
fake_tmpl._meta_data['bigip'].tmos_version = '11.6.0'
return fake_tmpl
class TestPolicyTemplates(object):
def test_create_raises(self, FakePolicyTemplate):
with pytest.raises(UnsupportedOperation):
FakePolicyTemplate.create()
def test_modify_raises(self, FakePolicyTemplate):
with pytest.raises(UnsupportedOperation):
FakePolicyTemplate.modify()
def test_delete_raises(self, FakePolicyTemplate):
with pytest.raises(UnsupportedOperation):
FakePolicyTemplate.delete()
def test_collection(self, fakeicontrolsession):
b = ManagementRoot('192.168.1.1', 'admin', 'admin')
t = b.tm.asm.policy_templates_s
test_meta = t._meta_data['attribute_registry']
test_meta2 = t._meta_data['allowed_lazy_attributes']
kind = 'tm:asm:policy-templates:policy-templatestate'
assert kind in list(iterkeys(test_meta))
assert Policy_Template in test_meta2
assert t._meta_data['object_has_stats'] is False
|
apache-2.0
| 1,093,821,788,481,558,000
| 34.759259
| 74
| 0.718799
| false
|
stepjam/PyRep
|
pyrep/robots/configuration_paths/holonomic_configuration_path.py
|
1
|
2699
|
from pyrep.backend import sim, utils
from pyrep.robots.configuration_paths.mobile_configuration_path import (
MobileConfigurationPath)
from pyrep.const import PYREP_SCRIPT_TYPE
from math import sqrt
class HolonomicConfigurationPath(MobileConfigurationPath):
"""A path expressed in joint configuration space.
Paths are retrieved from an :py:class:`Mobile`, and are associated with the
mobile base that generated the path.
This class is used for executing motion along a path via the
_get_base_actuation function employing a proportional controller.
"""
def step(self) -> bool:
"""Make a step along the trajectory.
Step forward by calling _get_base_actuation to get the velocity needed
to be applied at the wheels.
NOTE: This does not step the physics engine. This is left to the user.
:return: If the end of the trajectory has been reached.
"""
if self._path_done:
raise RuntimeError('This path has already been completed. '
'If you want to re-run, then call set_to_start.')
pos_inter = self._mobile.intermediate_target_base.get_position(
relative_to=self._mobile)
if len(self._path_points) > 2: # Non-linear path
if self.inter_done:
self._next_i_path()
self._set_inter_target(self.i_path)
self.inter_done = False
handleBase = self._mobile.get_handle()
handleInterTargetBase = self._mobile.intermediate_target_base.get_handle()
__, ret_floats, _, _ = utils.script_call(
'getBoxAdjustedMatrixAndFacingAngle@PyRep',
PYREP_SCRIPT_TYPE,
ints=[handleBase, handleInterTargetBase])
m = ret_floats[:-1]
angle = ret_floats[-1]
self._mobile.intermediate_target_base.set_position(
[m[3], m[7], self._mobile.target_z])
self._mobile.intermediate_target_base.set_orientation(
[0, 0, angle])
if sqrt((pos_inter[0]) ** 2 + (pos_inter[1]) ** 2) < 0.1:
self.inter_done = True
actuation = [0, 0, 0]
else:
actuation, _ = self._mobile.get_base_actuation()
self._mobile.set_base_angular_velocites(actuation)
if self.i_path == len(self._path_points) - 1:
self._path_done = True
else:
actuation, self._path_done = self._mobile.get_base_actuation()
self._mobile.set_base_angular_velocites(actuation)
return self._path_done
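# --- Editor's illustrative sketch (not part of PyRep) ---
# Hedged usage of the class above, as the step() docstring suggests:
# `pr` is assumed to be a launched PyRep instance and `path` a
# HolonomicConfigurationPath obtained from a mobile base.
#
# done = False
# while not done:
#     done = path.step()    # apply wheel velocities toward the next target
#     pr.step()             # step the physics engine (left to the user)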
|
mit
| -20,957,035,866,674,664
| 37.014085
| 90
| 0.588366
| false
|
lingthio/Flask-User
|
flask_user/user_mixin.py
|
1
|
4450
|
"""This module implements the UserMixin class for Flask-User.
This Mixin adds required methods to User data-model.
"""
from flask import current_app
from flask_login import UserMixin as FlaskLoginUserMixin
class UserMixin(FlaskLoginUserMixin):
""" This class adds required methods to the User data-model.
Example:
class User(db.Model, UserMixin):
...
"""
def get_id(self):
"""Converts a User ID and parts of a User password hash to a token."""
# This function is used by Flask-Login to store a User ID securely as a browser cookie.
        # The last part of the password is included to invalidate tokens when the password changes.
# user_id and password_ends_with are encrypted, timestamped and signed.
# This function works in tandem with UserMixin.get_user_by_token()
user_manager = current_app.user_manager
user_id = self.id
password_ends_with = '' if user_manager.USER_ENABLE_AUTH0 else self.password[-8:]
user_token = user_manager.generate_token(
user_id, # User ID
password_ends_with, # Last 8 characters of user password
)
# print("UserMixin.get_id: ID:", self.id, "token:", user_token)
return user_token
@classmethod
def get_user_by_token(cls, token, expiration_in_seconds=None):
# This function works in tandem with UserMixin.get_id()
# Token signatures and timestamps are verified.
# user_id and password_ends_with are decrypted.
# Verifies a token and decrypts a User ID and parts of a User password hash
user_manager = current_app.user_manager
data_items = user_manager.verify_token(token, expiration_in_seconds)
# Verify password_ends_with
token_is_valid = False
if data_items:
# Load user by User ID
user_id = data_items[0]
password_ends_with = data_items[1]
user = user_manager.db_manager.get_user_by_id(user_id)
user_password = '' if user_manager.USER_ENABLE_AUTH0 else user.password[-8:]
# Make sure that last 8 characters of user password matches
token_is_valid = user and user_password==password_ends_with
return user if token_is_valid else None
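    # Editor's illustrative note (not part of Flask-User): get_id() and
    # get_user_by_token() are inverses: token = user.get_id() is what
    # Flask-Login stores in the session cookie, and
    # UserMixin.get_user_by_token(token) later resolves it back to a User,
    # or to None once the password (and hence its last 8 characters) changes.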
def has_roles(self, *requirements):
""" Return True if the user has all of the specified roles. Return False otherwise.
has_roles() accepts a list of requirements:
            has_roles(requirement1, requirement2, requirement3).
Each requirement is either a role_name, or a tuple_of_role_names.
role_name example: 'manager'
tuple_of_role_names: ('funny', 'witty', 'hilarious')
A role_name-requirement is accepted when the user has this role.
A tuple_of_role_names-requirement is accepted when the user has ONE of these roles.
has_roles() returns true if ALL of the requirements have been accepted.
For example:
                has_roles('a', ('b', 'c'), 'd')
Translates to:
User has role 'a' AND (role 'b' OR role 'c') AND role 'd'"""
# Translates a list of role objects to a list of role_names
user_manager = current_app.user_manager
role_names = user_manager.db_manager.get_user_roles(self)
# has_role() accepts a list of requirements
for requirement in requirements:
if isinstance(requirement, (list, tuple)):
# this is a tuple_of_role_names requirement
tuple_of_role_names = requirement
authorized = False
for role_name in tuple_of_role_names:
if role_name in role_names:
# tuple_of_role_names requirement was met: break out of loop
authorized = True
break
if not authorized:
return False # tuple_of_role_names requirement failed: return False
else:
# this is a role_name requirement
role_name = requirement
# the user must have this role
if not role_name in role_names:
return False # role_name requirement failed: return False
# All requirements have been met: return True
return True
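# --- Editor's illustrative sketch (not part of Flask-User) ---
# Hedged example of the has_roles() semantics documented above, assuming
# a User that mixes in UserMixin and holds the roles 'admin' and 'editor':
#
#     user.has_roles('admin')                          # True
#     user.has_roles('admin', ('editor', 'reviewer'))  # True: admin AND (editor OR reviewer)
#     user.has_roles('admin', 'reviewer')              # False: 'reviewer' is missing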
|
mit
| 2,653,800,167,023,835,600
| 42.203883
| 106
| 0.602921
| false
|
timgraham/django-cms
|
cms/utils/__init__.py
|
1
|
3222
|
# -*- coding: utf-8 -*-
# TODO: this is just stuff from utils.py - should be split / moved
from django.conf import settings
from django.core.files.storage import get_storage_class
from django.utils.functional import LazyObject
from cms import constants
from cms.utils.conf import get_cms_setting
from cms.utils.conf import get_site_id # nopyflakes
from cms.utils.i18n import get_default_language
from cms.utils.i18n import get_language_list
from cms.utils.i18n import get_language_code
def get_template_from_request(request, obj=None, no_current_page=False):
"""
Gets a valid template from different sources or falls back to the default
template.
"""
template = None
if len(get_cms_setting('TEMPLATES')) == 1:
return get_cms_setting('TEMPLATES')[0][0]
if hasattr(request, 'POST') and "template" in request.POST:
template = request.POST['template']
elif hasattr(request, 'GET') and "template" in request.GET:
template = request.GET['template']
if not template and obj is not None:
template = obj.get_template()
if not template and not no_current_page and hasattr(request, "current_page"):
current_page = request.current_page
if hasattr(current_page, "get_template"):
template = current_page.get_template()
if template is not None and template in dict(get_cms_setting('TEMPLATES')).keys():
if template == constants.TEMPLATE_INHERITANCE_MAGIC and obj:
# Happens on admin's request when changing the template for a page
# to "inherit".
return obj.get_template()
return template
return get_cms_setting('TEMPLATES')[0][0]
def get_language_from_request(request, current_page=None):
"""
    Return the most obvious language according to the request
"""
language = None
if hasattr(request, 'POST'):
language = request.POST.get('language', None)
if hasattr(request, 'GET') and not language:
language = request.GET.get('language', None)
site_id = current_page.site_id if current_page else None
if language:
language = get_language_code(language)
        if language not in get_language_list(site_id):
language = None
if not language:
language = get_language_code(getattr(request, 'LANGUAGE_CODE', None))
if language:
        if language not in get_language_list(site_id):
language = None
if not language and current_page:
# in last resort, get the first language available in the page
languages = current_page.get_languages()
if len(languages) > 0:
language = languages[0]
if not language:
# language must be defined in CMS_LANGUAGES, so check first if there
# is any language with LANGUAGE_CODE, otherwise try to split it and find
# best match
language = get_default_language(site_id=site_id)
return language
default_storage = 'django.contrib.staticfiles.storage.StaticFilesStorage'
class ConfiguredStorage(LazyObject):
def _setup(self):
self._wrapped = get_storage_class(getattr(settings, 'STATICFILES_STORAGE', default_storage))()
configured_storage = ConfiguredStorage()
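# --- Editor's illustrative note (not part of django CMS) ---
# configured_storage resolves lazily: the class named by
# settings.STATICFILES_STORAGE is imported and instantiated only on first
# attribute access, e.g.
#
#     configured_storage.url('cms/img/logo.png')   # triggers _setup()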
|
bsd-3-clause
| -981,943,898,858,118,300
| 37.357143
| 102
| 0.678461
| false
|
diegotoral/season-hunter
|
SeasonHunter/config.py
|
1
|
1188
|
"""
Copyright (C) 2012 Caio Oliveira <caio.freitaso@gmail.com>
Copyright (C) 2012 Diego Toral <diegotoral@gmail.com>
Copyright (C) 2012 Thiago Abreu <thiagoa7@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import os
__version__ = "0.1"
__license__ = "GPL v3.0"
__data_dir__ = "../data/"
def get_data_path():
path = os.path.abspath(os.path.join(os.path.dirname(__file__), __data_dir__))
if os.path.exists(path):
return path
def get_ui_path():
return os.path.join(get_data_path(), "ui/")
def get_media_path():
return os.path.join(get_data_path(), "media/")
def get_version():
return __version__
|
gpl-3.0
| 4,582,873,267,735,139,000
| 29.461538
| 78
| 0.726431
| false
|
pyfa-org/eos
|
tests/integration/container/unordered/test_type_unique_set.py
|
1
|
8045
|
# ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================
from eos import Fit
from eos import Implant
from eos import Skill
from tests.integration.container.testcase import ContainerTestCase
class TestContainerTypeUniqueSet(ContainerTestCase):
def test_add_none(self):
fit = Fit()
# Action
with self.assertRaises(TypeError):
fit.skills.add(None)
# Verification
self.assertEqual(len(fit.skills), 0)
# Cleanup
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_add_item(self):
fit = Fit()
item_type = self.mktype()
item = Skill(item_type.id)
# Action
fit.skills.add(item)
# Verification
self.assertEqual(len(fit.skills), 1)
self.assertIs(fit.skills[item_type.id], item)
self.assertIn(item, fit.skills)
self.assertIn(item_type.id, fit.skills)
# Cleanup
self.assert_item_buffers_empty(item)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_add_item_type_failure(self):
fit = Fit()
item_type = self.mktype()
item = Implant(item_type.id)
# Action
with self.assertRaises(TypeError):
fit.skills.add(item)
# Verification
self.assertEqual(len(fit.skills), 0)
self.assertNotIn(item, fit.skills)
self.assertNotIn(item_type.id, fit.skills)
fit.implants.add(item)
# Cleanup
self.assert_item_buffers_empty(item)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_add_item_value_failure_has_fit(self):
fit = Fit()
fit_other = Fit()
item_type = self.mktype()
item = Skill(item_type.id)
fit_other.skills.add(item)
# Action
with self.assertRaises(ValueError):
fit.skills.add(item)
# Verification
self.assertEqual(len(fit.skills), 0)
self.assertEqual(len(fit_other.skills), 1)
self.assertIs(fit_other.skills[item_type.id], item)
self.assertIn(item, fit_other.skills)
# Cleanup
self.assert_item_buffers_empty(item)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_solsys_buffers_empty(fit_other.solar_system)
self.assert_log_entries(0)
def test_add_item_value_failure_existing_type_id(self):
fit = Fit()
item_type = self.mktype()
item1 = Skill(item_type.id)
item2 = Skill(item_type.id)
fit.skills.add(item1)
# Action
with self.assertRaises(ValueError):
fit.skills.add(item2)
# Verification
self.assertEqual(len(fit.skills), 1)
self.assertIs(fit.skills[item_type.id], item1)
self.assertIn(item1, fit.skills)
self.assertIn(item_type.id, fit.skills)
fit.skills.remove(item1)
fit.skills.add(item2)
# Cleanup
self.assert_item_buffers_empty(item1)
self.assert_item_buffers_empty(item2)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_remove_item(self):
fit = Fit()
item_type = self.mktype()
item = Skill(item_type.id)
fit.skills.add(item)
# Action
fit.skills.remove(item)
# Verification
self.assertEqual(len(fit.skills), 0)
self.assertNotIn(item, fit.skills)
self.assertNotIn(item_type.id, fit.skills)
# Cleanup
self.assert_item_buffers_empty(item)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_remove_item_failure(self):
fit = Fit()
item_type = self.mktype()
item = Skill(item_type.id)
# Action
with self.assertRaises(KeyError):
fit.skills.remove(item)
# Verification
self.assertEqual(len(fit.skills), 0)
self.assertNotIn(item, fit.skills)
self.assertNotIn(item_type.id, fit.skills)
fit.skills.add(item)
# Cleanup
self.assert_item_buffers_empty(item)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_delitem_item(self):
fit = Fit()
item_type = self.mktype()
item = Skill(item_type.id)
fit.skills.add(item)
# Action
del fit.skills[item_type.id]
# Verification
self.assertEqual(len(fit.skills), 0)
self.assertNotIn(item, fit.skills)
self.assertNotIn(item_type.id, fit.skills)
# Cleanup
self.assert_item_buffers_empty(item)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_delitem_item_failure(self):
fit = Fit()
item_type = self.mktype()
empty_type_id = self.allocate_type_id()
item = Skill(item_type.id)
fit.skills.add(item)
# Action
with self.assertRaises(KeyError):
del fit.skills[empty_type_id]
# Verification
self.assertEqual(len(fit.skills), 1)
self.assertIn(item, fit.skills)
self.assertIn(item_type.id, fit.skills)
# Cleanup
self.assert_item_buffers_empty(item)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_key_integrity(self):
fit = Fit()
item_type = self.mktype()
item1 = Skill(item_type.id)
item2 = Skill(item_type.id)
fit.skills.add(item1)
with self.assertRaises(KeyError):
fit.skills.remove(item2)
# Verification
self.assertIs(fit.skills[item_type.id], item1)
# Cleanup
self.assert_item_buffers_empty(item1)
self.assert_item_buffers_empty(item2)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_clear(self):
fit = Fit()
item1_type = self.mktype()
item1 = Skill(item1_type.id)
item2_type = self.mktype()
item2 = Skill(item2_type.id)
fit.skills.add(item1)
fit.skills.add(item2)
# Action
fit.skills.clear()
# Verification
self.assertEqual(len(fit.skills), 0)
self.assertNotIn(item1, fit.skills)
self.assertNotIn(item1_type.id, fit.skills)
self.assertNotIn(item2, fit.skills)
self.assertNotIn(item2_type.id, fit.skills)
# Cleanup
self.assert_item_buffers_empty(item1)
self.assert_item_buffers_empty(item2)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
def test_bool(self):
fit = Fit()
item = Skill(self.mktype().id)
self.assertIs(bool(fit.skills), False)
fit.skills.add(item)
self.assertIs(bool(fit.skills), True)
fit.skills.remove(item)
self.assertIs(bool(fit.skills), False)
# Cleanup
self.assert_item_buffers_empty(item)
self.assert_solsys_buffers_empty(fit.solar_system)
self.assert_log_entries(0)
|
lgpl-3.0
| 6,906,514,192,450,944,000
| 33.676724
| 80
| 0.606339
| false
|
Germanika/plover
|
plover/dictionary/rtfcre_dict.py
|
1
|
11100
|
# Copyright (c) 2013 Hesky Fisher
# See LICENSE.txt for details.
#
# TODO: Convert non-ascii characters to UTF8
# TODO: What does ^ mean in Eclipse?
# TODO: What does #N mean in Eclipse?
# TODO: convert supported commands from Eclipse
"""Parsing an RTF/CRE dictionary.
RTF/CRE spec:
http://www.legalxml.org/workgroups/substantive/transcripts/cre-spec.htm
"""
import inspect
import codecs
import re
# Python 2/3 compatibility.
from six import get_function_code
from plover import log
from plover.resource import resource_stream
from plover.steno import normalize_steno
from plover.steno_dictionary import StenoDictionary
# TODO: Move dictionary format somewhere more canonical than formatting.
from plover.formatting import META_RE
# A regular expression to capture an individual entry in the dictionary.
DICT_ENTRY_PATTERN = re.compile(r'(?s)(?<!\\){\\\*\\cxs (?P<steno>[^}]+)}' +
r'(?P<translation>.*?)(?:(?<!\\)(?:\r\n|\n))*?'+
r'(?=(?:(?<!\\){\\\*\\cxs [^}]+})|' +
r'(?:(?:(?<!\\)(?:\r\n|\n)\s*)*}\s*\Z))')
class TranslationConverter(object):
"""Convert an RTF/CRE translation into plover's internal format."""
def __init__(self, styles={}):
self.styles = styles
def linenumber(f):
return get_function_code(f[1].__func__).co_firstlineno
handler_funcs = inspect.getmembers(self, inspect.ismethod)
handler_funcs.sort(key=linenumber)
handlers = [self._make_re_handler(f.__doc__, f)
for name, f in handler_funcs
if name.startswith('_re_handle_')]
handlers.append(self._match_nested_command_group)
def handler(s, pos):
for handler in handlers:
result = handler(s, pos)
if result:
return result
return None
self._handler = handler
self._command_pattern = re.compile(
r'(\\\*)?\\([a-z]+)(-?[0-9]+)?[ ]?')
self._multiple_whitespace_pattern = re.compile(r'([ ]{2,})')
# This poorly named variable indicates whether the current context is
# one where commands can be inserted (True) or not (False).
self._whitespace = True
def _make_re_handler(self, pattern, f):
pattern = re.compile(pattern)
def handler(s, pos):
match = pattern.match(s, pos)
if match:
newpos = match.end()
result = f(match)
return (newpos, result)
return None
return handler
def _re_handle_escapedchar(self, m):
r'\\([-\\{}])'
return m.group(1)
def _re_handle_hardspace(self, m):
r'\\~'
return '{^ ^}'
def _re_handle_dash(self, m):
r'\\_'
return '-'
def _re_handle_escaped_newline(self, m):
r'\\\r|\\\n'
return '{#Return}{#Return}'
def _re_handle_infix(self, m):
r'\\cxds ([^{}\\\r\n]+)\\cxds ?'
return '{^%s^}' % m.group(1)
def _re_handle_suffix(self, m):
r'\\cxds ([^{}\\\r\n ]+)'
return '{^%s}' % m.group(1)
def _re_handle_prefix(self, m):
r'([^{}\\\r\n ]+)\\cxds ?'
return '{%s^}' % m.group(1)
def _re_handle_commands(self, m):
r'(\\\*)?\\([a-z]+)(-?[0-9]+)? ?'
command = m.group(2)
arg = m.group(3)
if arg:
arg = int(arg)
if command == 'cxds':
return '{^}'
if command == 'cxfc':
return '{-|}'
if command == 'cxfl':
return '{>}'
if command == 'par':
self.seen_par = True
return '{#Return}{#Return}'
if command == 's':
result = []
if not self.seen_par:
result.append('{#Return}{#Return}')
style_name = self.styles.get(arg, '')
if style_name.startswith('Contin'):
result.append('{^ ^}')
return ''.join(result)
# Unrecognized commands are ignored.
return ''
def _re_handle_simple_command_group(self, m):
r'{(\\\*)?\\([a-z]+)(-?[0-9]+)?[ ]?([^{}]*)}'
ignore = bool(m.group(1))
command = m.group(2)
contents = m.group(4)
if contents is None:
contents = ''
if command == 'cxstit':
# Plover doesn't support stitching.
return self(contents)
if command == 'cxfing':
prev = self._whitespace
self._whitespace = False
result = '{&' + contents + '}'
self._whitespace = prev
return result
if command == 'cxp':
prev = self._whitespace
self._whitespace = False
contents = self(contents)
if contents is None:
return None
self._whitespace = prev
stripped = contents.strip()
if stripped in ['.', '!', '?', ',', ';', ':']:
return '{' + stripped + '}'
if stripped == "'":
return "{^'}"
if stripped in ['-', '/']:
return '{^' + contents + '^}'
# Show unknown punctuation as given.
return '{^' + contents + '^}'
if command == 'cxsvatdictflags' and 'N' in contents:
return '{-|}'
# unrecognized commands
if ignore:
return ''
else:
return self(contents)
def _re_handle_eclipse_command(self, m):
r'({[^\\][^{}]*})'
return m.group()
# caseCATalyst doesn't put punctuation in \cxp so we will treat any
# isolated punctuation at the beginning of the translation as special.
def _re_handle_punctuation(self, m):
r'^([.?!:;,])(?=\s|$)'
if self._whitespace:
result = '{%s}' % m.group(1)
else:
result = m.group(1)
return result
def _re_handle_text(self, m):
r'[^{}\\\r\n]+'
text = m.group()
if self._whitespace:
text = self._multiple_whitespace_pattern.sub(r'{^\1^}', text)
return text
def _get_matching_bracket(self, s, pos):
if s[pos] != '{':
return None
end = len(s)
depth = 1
pos += 1
while pos != end:
c = s[pos]
if c == '{':
depth += 1
elif c == '}':
depth -= 1
if depth == 0:
break
pos += 1
if pos < end and s[pos] == '}':
return pos
return None
def _get_command(self, s, pos):
return self._command_pattern.match(s, pos)
def _match_nested_command_group(self, s, pos):
startpos = pos
endpos = self._get_matching_bracket(s, pos)
if endpos is None:
return None
command_match = self._get_command(s, startpos + 1)
if command_match is None:
return None
ignore = bool(command_match.group(1))
command = command_match.group(2)
if command == 'cxconf':
pos = command_match.end()
last = ''
while pos < endpos:
if s[pos] in ['[', '|', ']']:
pos += 1
continue
if s[pos] == '{':
command_match = self._get_command(s, pos + 1)
if command_match is None:
return None
if command_match.group(2) != 'cxc':
return None
cxc_end = self._get_matching_bracket(s, pos)
if cxc_end is None:
return None
last = s[command_match.end():cxc_end]
pos = cxc_end + 1
continue
return None
return (endpos + 1, self(last))
if ignore:
return (endpos + 1, '')
else:
return (endpos + 1, self(s[command_match.end():endpos]))
def __call__(self, s):
self.seen_par = False
pos = 0
tokens = []
handler = self._handler
end = len(s)
while pos != end:
result = handler(s, pos)
if result is None:
return None
pos = result[0]
token = result[1]
if token is None:
return None
tokens.append(token)
return ''.join(tokens)
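# --- Editor's illustrative sketch (not part of Plover) ---
# Hedged example of the converter above: an RTF/CRE suffix entry is
# rewritten into Plover's suffix meta-string by _re_handle_suffix.
#
#     conv = TranslationConverter()
#     conv(r'\cxds ing')    # -> '{^ing}'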
STYLESHEET_RE = re.compile(r'(?s){\\s([0-9]+).*?((?:\b\w+\b\s*)+);}')
def load_stylesheet(s):
"""Returns a dictionary mapping a number to a style name."""
return dict((int(k), v) for k, v in STYLESHEET_RE.findall(s))
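# --- Editor's illustrative sketch (not part of Plover) ---
# Hedged example of load_stylesheet() on a fragment of an RTF stylesheet:
#
#     load_stylesheet(r'{\stylesheet{\s0 Normal;}{\s1 Question;}}')
#     # -> {0: 'Normal', 1: 'Question'}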
def load_dictionary(filename):
"""Load an RTF/CRE dictionary."""
with resource_stream(filename) as fp:
s = fp.read().decode('cp1252')
styles = load_stylesheet(s)
d = {}
converter = TranslationConverter(styles)
for m in DICT_ENTRY_PATTERN.finditer(s):
steno = normalize_steno(m.group('steno'))
translation = m.group('translation')
converted = converter(translation)
if converted is not None:
d[steno] = converted
return StenoDictionary(d)
HEADER = ("{\\rtf1\\ansi{\\*\\cxrev100}\\cxdict{\\*\\cxsystem Plover}" +
"{\\stylesheet{\\s0 Normal;}}\r\n")
def format_translation(t):
t = ' '.join([x.strip() for x in META_RE.findall(t) if x.strip()])
t = re.sub(r'{\.}', '{\\cxp. }', t)
t = re.sub(r'{!}', '{\\cxp! }', t)
t = re.sub(r'{\?}', '{\\cxp? }', t)
t = re.sub(r'{\,}', '{\\cxp, }', t)
t = re.sub(r'{:}', '{\\cxp: }', t)
t = re.sub(r'{;}', '{\\cxp; }', t)
t = re.sub(r'{\^}', '\\cxds ', t)
t = re.sub(r'{\^([^^}]*)}', '\\cxds \\1', t)
t = re.sub(r'{([^^}]*)\^}', '\\1\\cxds ', t)
t = re.sub(r'{\^([^^}]*)\^}', '\\cxds \\1\\cxds ', t)
t = re.sub(r'{-\|}', '\\cxfc ', t)
t = re.sub(r'{>}', '\\cxfls ', t)
t = re.sub(r'{ }', ' ', t)
t = re.sub(r'{&([^}]+)}', '{\\cxfing \\1}', t)
t = re.sub(r'{#([^}]+)}', '\\{#\\1\\}', t)
t = re.sub(r'{PLOVER:([a-zA-Z]+)}', '\\{PLOVER:\\1\\}', t)
t = re.sub(r'\\"', '"', t)
return t
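# --- Editor's illustrative sketch (not part of Plover) ---
# Hedged examples for format_translation(), assuming META_RE tokenizes
# the meta-strings as they are used above:
#
#     format_translation('{.}')       # -> '{\\cxp. }'
#     format_translation('{^ing}')    # -> '\\cxds ing'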
# TODO: test this
def save_dictionary(d, fp):
writer = codecs.getwriter('cp1252')(fp)
writer.write(HEADER)
failed_entries = []
for strokes, translation in d.items():
s = '/'.join(strokes)
t = format_translation(translation)
entry = "{\\*\\cxs %s}%s\r\n" % (s, t)
try:
writer.write(entry)
except UnicodeEncodeError:
failed_entries.append(
(s,
translation,
_('Plover does not support Unicode characters in RTF'))
)
writer.write("}\r\n")
return failed_entries
def create_dictionary():
return StenoDictionary()
|
gpl-2.0
| -1,321,490,776,352,012,000
| 30.005587
| 80
| 0.472072
| false
|
OpenBeta/beta
|
apiserver/model.py
|
1
|
9792
|
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.dialects import postgresql
from geoalchemy2 import Geometry
from sqlalchemy import func, ForeignKey, PrimaryKeyConstraint, event, Sequence
from sqlalchemy.schema import DropTable
from sqlalchemy.ext.compiler import compiles
import flask_login
from datetime import datetime
import json
import collections
from key_helper import *
db = SQLAlchemy()
FeatureSet = collections.namedtuple('FeatureSet', 'route, boundary', verbose=True)
class Route(db.Model):
__tablename__ = 'routes'
id = db.Column(db.Integer, primary_key=True)
geo = db.Column(Geometry(geometry_type='POINT', srid=4326), unique=True)
name = db.Column(db.Text, index=True)
grade = db.Column(db.Text)
grade_type = db.Column(db.Text, ForeignKey('grade_types.id'))
properties_json = db.Column(postgresql.JSONB)
def __init__(self, geojson):
self.geo = func.ST_SetSRID(func.ST_GeomFromGeoJSON(json.dumps(geojson['geometry'])), 4326)
self.name = geojson['properties']['name']
if 'grade' in geojson['properties']:
grade = geojson['properties']['grade']
self.grade = grade['value']
self.grade_type = grade['type']
else:
self.grade = ''
            self.grade_type = 'unknown'  # matches the 'unknown' row seeded into grade_types
self.properties_json = geojson['properties'] # store raw data
def __repr__(self):
return '<Route %r>' % self.name
def to_json(self):
return {
"type": "Feature",
"id": "route/{}".format(self.id),
"geometry": json.loads(db.session.scalar(func.ST_AsGeoJSON(self.geo))),
"properties": self.properties_json
}
def __eq__(self, other):
"""Override the default Equals behavior"""
if isinstance(other, self.__class__):
lhs = json.loads(db.session.scalar(func.ST_AsGeoJSON(self.geo)))
rhs = json.loads(db.session.scalar(func.ST_AsGeoJSON(other.geo)))
return lhs == rhs
return NotImplemented
def __ne__(self, other):
"""Define a non-equality test"""
return not self.__eq__(other)
def __hash__(self):
"""Override the default hash behavior (that returns the id or the object)"""
return hash(self.geo)
class GradeType(db.Model):
__tablename__ = 'grade_types'
id = db.Column(db.Text, primary_key=True, unique=True)
full_name = db.Column(db.Text)
def __init__(self, id, full_name):
self.id = id
self.full_name = full_name
@event.listens_for(GradeType.__table__, 'after_create')
def insert_initial_values(*args, **kwargs):
db.session.add(GradeType(id='unknown', full_name='Type Unknown'))
db.session.add(GradeType(id='yds', full_name='Yosemite Decimal System'))
db.session.add(GradeType(id='v', full_name='Hueco V-scale'))
db.session.commit()
event.listen(GradeType.__table__, 'after_create', insert_initial_values)
class GradeDetail(db.Model):
__tablename__ = 'grade_details'
id = db.Column(db.Text, ForeignKey('grade_types.id'))
value = db.Column(db.Text)
weight = db.Column(db.Integer)
__table_args__ = (PrimaryKeyConstraint(id, weight),)
class Boundary(db.Model):
__tablename__ = 'boundaries'
BOUNDARY_ID_SEQ = Sequence('boundary_id_seq', metadata=db.Model.metadata) # define sequence explicitly
boundary_id = db.Column(db.Integer, primary_key=True, server_default=BOUNDARY_ID_SEQ.next_value())
name = db.Column(db.Text, index=True)
is_top_level = db.Column(db.Boolean)
geo = db.Column(Geometry(geometry_type='POLYGON', srid=4326), unique=True)
properties_json = db.Column(postgresql.JSONB)
sys_period = db.Column(postgresql.TSTZRANGE, nullable=False)
def __init__(self, geojson):
props = geojson['properties']
self.name = props.get('name')
self.is_top_level = props.get('topLevel', False)
self.geo = func.ST_SetSRID(func.ST_GeomFromGeoJSON(json.dumps(geojson['geometry'])), 4326)
self.properties_json = props
def to_json(self):
return {
"type": "Feature",
"id": "area/{}".format(self.boundary_id),
"geometry": json.loads(db.session.scalar(func.ST_AsGeoJSON(self.geo))),
"properties": self.properties_json
}
class BoundaryHistory(db.Model):
__tablename__ = 'boundaries_history'
history_id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    boundary_id = db.Column(db.Integer)
name = db.Column(db.Text)
is_top_level = db.Column(db.Boolean)
geo = db.Column(Geometry(geometry_type='POLYGON', srid=4326))
properties_json = db.Column(postgresql.JSONB)
sys_period = db.Column(postgresql.TSTZRANGE, nullable=False)
class APIUser(db.Model, flask_login.UserMixin):
__tablename__ = 'api_users'
uid = db.Column(db.Integer, primary_key=True, autoincrement=True)
email = db.Column(db.Text, primary_key=True, unique=True)
api_key = db.Column(db.Text, primary_key=True, unique=True)
active = db.Column(db.Boolean)
created_ts = db.Column(db.DateTime(timezone=True))
mod_ts = db.Column(db.DateTime(timezone=True))
def __init__(self, **kwargs):
self.active = kwargs['active']
self.email = kwargs['email']
now = datetime.utcnow()
self.created_ts = now
        self.mod_ts = now
self.api_key = genkey(userKeySigner)
    @property
    def is_active(self):
        # returning self.is_active here would recurse forever; return the
        # underlying 'active' column instead
        return self.active
@property
def is_authenticated(self):
return True
@property
def apikey(self):
return self.api_key
class AuditLog(db.Model):
__tablename__ = 'audit_log'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
op = db.Column(db.CHAR)
row_id = db.Column(db.Integer)
table_name = db.Column(db.VARCHAR(50))
user_id = db.Column(db.VARCHAR(30), nullable=False)
ip = db.Column(postgresql.INET)
ts = db.Column(db.DateTime(timezone=True))
def get_boundary_by_id(boundary_id):
row = db.session.query(Boundary).filter(Boundary.boundary_id == boundary_id).first()
if row is None:
return None
return row.to_json()
def search_within_boundary_by_id(boundary_id):
rows = db.session.query(Route, Boundary)\
.filter("ST_WITHIN(routes.geo, boundaries.geo)")\
.filter("boundaries.boundary_id=:id")\
.params(id=boundary_id).all()
return {
"type": "FeatureCollection",
"features": map(lambda item: item.to_json(), rows)
}
def search_within_radius_in_meters(location, radius, route=True, boundary=False):
coordinates = location.split(",")
route_rows = list()
boundary_rows = list()
if route:
route_rows = db.session.query(Route).\
filter('ST_DistanceSphere(geo, ST_MakePoint(:lng,:lat))<=:r').\
params(lng=coordinates[0], lat=coordinates[1], r=radius).all()
if boundary:
boundary_rows = db.session.query(Boundary).\
filter('ST_DistanceSphere(geo, ST_MakePoint(:lng,:lat))<=:r').\
params(lng=coordinates[0], lat=coordinates[1], r=radius).all()
route_json = {
"type": "FeatureCollection",
"features": map(lambda item: item.to_json(), route_rows)
}
boundary_json = {
"type": "FeatureCollection",
"features": map(lambda item: item.to_json(), boundary_rows)
}
return FeatureSet(route=route_json, boundary=boundary_json)
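# --- Editor's illustrative sketch (not part of the original module) ---
# Hedged example call: find routes within 500 m of a "lng,lat" point
# (the coordinates here are arbitrary).
#
#     fs = search_within_radius_in_meters('-118.292,37.640', 500)
#     fs.route       # GeoJSON FeatureCollection of nearby routes
#     fs.boundary    # empty FeatureCollection (boundary=False by default)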
def recent_activities(count, route=True, boundary=False):
    hard_limit = 10
route_rows = list()
boundary_rows = list()
if count > hard_limit:
count = hard_limit
if route:
route_rows = db.session.query(Route).\
order_by(Route.id.desc()).\
            limit(count)
if boundary:
boundary_rows = db.session.query(Boundary).\
order_by(Boundary.boundary_id.desc()).\
            limit(count)
route_json = {
"type": "FeatureCollection",
"features": map(lambda item: item.to_json(), route_rows)
}
boundary_json = {
"type": "FeatureCollection",
"features": map(lambda item: item.to_json(), boundary_rows)
}
return FeatureSet(route=route_json, boundary=boundary_json)
def setup_temporal_tables():
sql = ("CREATE TRIGGER boundary_history BEFORE INSERT OR UPDATE OR DELETE ON Boundaries "
"FOR EACH ROW EXECUTE PROCEDURE versioning('sys_period', 'boundaries_history', true)")
db.session.execute(sql)
sql = ("create or replace function trxn_history() returns trigger as $$ "
"BEGIN"
" IF (TG_OP = 'DELETE') THEN"
" INSERT INTO audit_log (op, row_id, table_name, user_id, ts) "
" VALUES('D', OLD.boundary_id, TG_TABLE_NAME, current_setting('vars.edited_by'),now());"
" ELSEIF (TG_OP='UPDATE') THEN"
" INSERT INTO audit_log (op, row_id, table_name, user_id, ts) "
" VALUES('U', OLD.boundary_id, TG_TABLE_NAME, NEW.properties_json->>'editedBy', now());"
" ELSEIF (TG_OP='INSERT') THEN"
" INSERT INTO audit_log (op, row_id, table_name, user_id, ts) "
" VALUES('I', NEW.boundary_id, TG_TABLE_NAME, NEW.properties_json->>'editedBy', now());"
" END IF;"
" RETURN null;"
"END;"
"$$ language plpgsql;")
db.session.execute(sql)
sql = ("CREATE TRIGGER audit AFTER INSERT OR UPDATE OR DELETE ON boundaries "
"FOR EACH ROW EXECUTE procedure trxn_history();")
db.session.execute(sql)
db.session.commit()
@compiles(DropTable, "postgresql")
def _compile_drop_table(element, compiler, **kwargs):
return compiler.visit_drop_table(element) + " CASCADE"
|
gpl-3.0
| -8,655,785,475,700,712,000
| 33.478873
| 107
| 0.625613
| false
|
simartin/servo
|
tests/wpt/web-platform-tests/tools/ci/tc/tests/test_valid.py
|
1
|
11726
|
import json
import os
from io import open
import sys
import jsone
import mock
import pytest
import requests
import yaml
from jsonschema import validate
here = os.path.dirname(__file__)
root = os.path.abspath(os.path.join(here, "..", "..", "..", ".."))
sys.path.insert(0, root)
from tools.ci.tc import decision
def data_path(filename):
return os.path.join(here, "..", "testdata", filename)
def test_verify_taskcluster_yml():
"""Verify that the json-e in the .taskcluster.yml is valid"""
with open(os.path.join(root, ".taskcluster.yml"), encoding="utf8") as f:
template = yaml.safe_load(f)
events = [("pr_event.json", "github-pull-request", "Pull Request"),
("master_push_event.json", "github-push", "Push to master")]
for filename, tasks_for, title in events:
with open(data_path(filename), encoding="utf8") as f:
event = json.load(f)
context = {"tasks_for": tasks_for,
"event": event,
"as_slugid": lambda x: x}
jsone.render(template, context)
def test_verify_payload():
"""Verify that the decision task produces tasks with a valid payload"""
from tools.ci.tc.decision import decide
r = requests.get("https://community-tc.services.mozilla.com/schemas/queue/v1/create-task-request.json")
r.raise_for_status()
create_task_schema = r.json()
r = requests.get("https://community-tc.services.mozilla.com/references/schemas/docker-worker/v1/payload.json")
r.raise_for_status()
payload_schema = r.json()
jobs = ["lint",
"manifest_upload",
"resources_unittest",
"tools_unittest",
"wpt_integration",
"wptrunner_infrastructure",
"wptrunner_unittest"]
for filename in ["pr_event.json", "master_push_event.json"]:
with open(data_path(filename), encoding="utf8") as f:
event = json.load(f)
with mock.patch("tools.ci.tc.decision.get_fetch_rev", return_value=(None, event["after"], None)):
with mock.patch("tools.ci.tc.decision.get_run_jobs", return_value=set(jobs)):
task_id_map = decide(event)
for name, (task_id, task_data) in task_id_map.items():
try:
validate(instance=task_data, schema=create_task_schema)
validate(instance=task_data["payload"], schema=payload_schema)
except Exception as e:
print("Validation failed for task '%s':\n%s" % (name, json.dumps(task_data, indent=2)))
raise e
@pytest.mark.parametrize("event_path,is_pr,files_changed,expected", [
("master_push_event.json", False, None,
['download-firefox-nightly',
'wpt-firefox-nightly-testharness-1',
'wpt-firefox-nightly-testharness-2',
'wpt-firefox-nightly-testharness-3',
'wpt-firefox-nightly-testharness-4',
'wpt-firefox-nightly-testharness-5',
'wpt-firefox-nightly-testharness-6',
'wpt-firefox-nightly-testharness-7',
'wpt-firefox-nightly-testharness-8',
'wpt-firefox-nightly-testharness-9',
'wpt-firefox-nightly-testharness-10',
'wpt-firefox-nightly-testharness-11',
'wpt-firefox-nightly-testharness-12',
'wpt-firefox-nightly-testharness-13',
'wpt-firefox-nightly-testharness-14',
'wpt-firefox-nightly-testharness-15',
'wpt-firefox-nightly-testharness-16',
'wpt-chrome-dev-testharness-1',
'wpt-chrome-dev-testharness-2',
'wpt-chrome-dev-testharness-3',
'wpt-chrome-dev-testharness-4',
'wpt-chrome-dev-testharness-5',
'wpt-chrome-dev-testharness-6',
'wpt-chrome-dev-testharness-7',
'wpt-chrome-dev-testharness-8',
'wpt-chrome-dev-testharness-9',
'wpt-chrome-dev-testharness-10',
'wpt-chrome-dev-testharness-11',
'wpt-chrome-dev-testharness-12',
'wpt-chrome-dev-testharness-13',
'wpt-chrome-dev-testharness-14',
'wpt-chrome-dev-testharness-15',
'wpt-chrome-dev-testharness-16',
'wpt-firefox-nightly-reftest-1',
'wpt-firefox-nightly-reftest-2',
'wpt-firefox-nightly-reftest-3',
'wpt-firefox-nightly-reftest-4',
'wpt-firefox-nightly-reftest-5',
'wpt-chrome-dev-reftest-1',
'wpt-chrome-dev-reftest-2',
'wpt-chrome-dev-reftest-3',
'wpt-chrome-dev-reftest-4',
'wpt-chrome-dev-reftest-5',
'wpt-firefox-nightly-wdspec-1',
'wpt-firefox-nightly-wdspec-2',
'wpt-chrome-dev-wdspec-1',
'wpt-chrome-dev-wdspec-2',
'wpt-firefox-nightly-crashtest-1',
'wpt-chrome-dev-crashtest-1',
'wpt-firefox-nightly-print-reftest-1',
'wpt-chrome-dev-print-reftest-1',
'lint']),
("pr_event.json", True, {".taskcluster.yml", ".travis.yml", "tools/ci/start.sh"},
['lint',
'tools/ unittests (Python 3.6)',
'tools/ unittests (Python 3.8)',
'tools/ integration tests (Python 3.6)',
'tools/ integration tests (Python 3.8)',
'resources/ tests (Python 3.6)',
'resources/ tests (Python 3.8)',
'download-firefox-nightly',
'infrastructure/ tests (Python 3)',
'sink-task']),
    # More tests are affected in the actual PR, but that should not affect the scheduled tasks
("pr_event_tests_affected.json", True, {"layout-instability/clip-negative-bottom-margin.html",
"layout-instability/composited-element-movement.html"},
['download-firefox-nightly',
'wpt-firefox-nightly-stability',
'wpt-firefox-nightly-results',
'wpt-firefox-nightly-results-without-changes',
'wpt-chrome-dev-stability',
'wpt-chrome-dev-results',
'wpt-chrome-dev-results-without-changes',
'lint',
'sink-task']),
("epochs_daily_push_event.json", False, None,
['download-firefox-stable',
'wpt-firefox-stable-testharness-1',
'wpt-firefox-stable-testharness-2',
'wpt-firefox-stable-testharness-3',
'wpt-firefox-stable-testharness-4',
'wpt-firefox-stable-testharness-5',
'wpt-firefox-stable-testharness-6',
'wpt-firefox-stable-testharness-7',
'wpt-firefox-stable-testharness-8',
'wpt-firefox-stable-testharness-9',
'wpt-firefox-stable-testharness-10',
'wpt-firefox-stable-testharness-11',
'wpt-firefox-stable-testharness-12',
'wpt-firefox-stable-testharness-13',
'wpt-firefox-stable-testharness-14',
'wpt-firefox-stable-testharness-15',
'wpt-firefox-stable-testharness-16',
'wpt-chrome-nightly-testharness-1',
'wpt-chrome-nightly-testharness-2',
'wpt-chrome-nightly-testharness-3',
'wpt-chrome-nightly-testharness-4',
'wpt-chrome-nightly-testharness-5',
'wpt-chrome-nightly-testharness-6',
'wpt-chrome-nightly-testharness-7',
'wpt-chrome-nightly-testharness-8',
'wpt-chrome-nightly-testharness-9',
'wpt-chrome-nightly-testharness-10',
'wpt-chrome-nightly-testharness-11',
'wpt-chrome-nightly-testharness-12',
'wpt-chrome-nightly-testharness-13',
'wpt-chrome-nightly-testharness-14',
'wpt-chrome-nightly-testharness-15',
'wpt-chrome-nightly-testharness-16',
'wpt-chrome-stable-testharness-1',
'wpt-chrome-stable-testharness-2',
'wpt-chrome-stable-testharness-3',
'wpt-chrome-stable-testharness-4',
'wpt-chrome-stable-testharness-5',
'wpt-chrome-stable-testharness-6',
'wpt-chrome-stable-testharness-7',
'wpt-chrome-stable-testharness-8',
'wpt-chrome-stable-testharness-9',
'wpt-chrome-stable-testharness-10',
'wpt-chrome-stable-testharness-11',
'wpt-chrome-stable-testharness-12',
'wpt-chrome-stable-testharness-13',
'wpt-chrome-stable-testharness-14',
'wpt-chrome-stable-testharness-15',
'wpt-chrome-stable-testharness-16',
'wpt-webkitgtk_minibrowser-nightly-testharness-1',
'wpt-webkitgtk_minibrowser-nightly-testharness-2',
'wpt-webkitgtk_minibrowser-nightly-testharness-3',
'wpt-webkitgtk_minibrowser-nightly-testharness-4',
'wpt-webkitgtk_minibrowser-nightly-testharness-5',
'wpt-webkitgtk_minibrowser-nightly-testharness-6',
'wpt-webkitgtk_minibrowser-nightly-testharness-7',
'wpt-webkitgtk_minibrowser-nightly-testharness-8',
'wpt-webkitgtk_minibrowser-nightly-testharness-9',
'wpt-webkitgtk_minibrowser-nightly-testharness-10',
'wpt-webkitgtk_minibrowser-nightly-testharness-11',
'wpt-webkitgtk_minibrowser-nightly-testharness-12',
'wpt-webkitgtk_minibrowser-nightly-testharness-13',
'wpt-webkitgtk_minibrowser-nightly-testharness-14',
'wpt-webkitgtk_minibrowser-nightly-testharness-15',
'wpt-webkitgtk_minibrowser-nightly-testharness-16',
'wpt-servo-nightly-testharness-1',
'wpt-servo-nightly-testharness-2',
'wpt-servo-nightly-testharness-3',
'wpt-servo-nightly-testharness-4',
'wpt-servo-nightly-testharness-5',
'wpt-servo-nightly-testharness-6',
'wpt-servo-nightly-testharness-7',
'wpt-servo-nightly-testharness-8',
'wpt-servo-nightly-testharness-9',
'wpt-servo-nightly-testharness-10',
'wpt-servo-nightly-testharness-11',
'wpt-servo-nightly-testharness-12',
'wpt-servo-nightly-testharness-13',
'wpt-servo-nightly-testharness-14',
'wpt-servo-nightly-testharness-15',
'wpt-servo-nightly-testharness-16',
'wpt-firefox-stable-reftest-1',
'wpt-firefox-stable-reftest-2',
'wpt-firefox-stable-reftest-3',
'wpt-firefox-stable-reftest-4',
'wpt-firefox-stable-reftest-5',
'wpt-chrome-nightly-reftest-1',
'wpt-chrome-nightly-reftest-2',
'wpt-chrome-nightly-reftest-3',
'wpt-chrome-nightly-reftest-4',
'wpt-chrome-nightly-reftest-5',
'wpt-chrome-stable-reftest-1',
'wpt-chrome-stable-reftest-2',
'wpt-chrome-stable-reftest-3',
'wpt-chrome-stable-reftest-4',
'wpt-chrome-stable-reftest-5',
'wpt-webkitgtk_minibrowser-nightly-reftest-1',
'wpt-webkitgtk_minibrowser-nightly-reftest-2',
'wpt-webkitgtk_minibrowser-nightly-reftest-3',
'wpt-webkitgtk_minibrowser-nightly-reftest-4',
'wpt-webkitgtk_minibrowser-nightly-reftest-5',
'wpt-servo-nightly-reftest-1',
'wpt-servo-nightly-reftest-2',
'wpt-servo-nightly-reftest-3',
'wpt-servo-nightly-reftest-4',
'wpt-servo-nightly-reftest-5',
'wpt-firefox-stable-wdspec-1',
'wpt-firefox-stable-wdspec-2',
'wpt-chrome-nightly-wdspec-1',
'wpt-chrome-nightly-wdspec-2',
'wpt-chrome-stable-wdspec-1',
'wpt-chrome-stable-wdspec-2',
'wpt-webkitgtk_minibrowser-nightly-wdspec-1',
'wpt-webkitgtk_minibrowser-nightly-wdspec-2',
'wpt-servo-nightly-wdspec-1',
'wpt-servo-nightly-wdspec-2',
'wpt-firefox-stable-crashtest-1',
'wpt-chrome-nightly-crashtest-1',
'wpt-chrome-stable-crashtest-1',
'wpt-webkitgtk_minibrowser-nightly-crashtest-1',
'wpt-servo-nightly-crashtest-1',
'wpt-firefox-stable-print-reftest-1',
'wpt-chrome-nightly-print-reftest-1',
'wpt-chrome-stable-print-reftest-1'])
])
def test_schedule_tasks(event_path, is_pr, files_changed, expected):
with mock.patch("tools.ci.tc.decision.get_fetch_rev", return_value=(None, None, None)):
with mock.patch("tools.wpt.testfiles.repo_files_changed",
return_value=files_changed):
with open(data_path(event_path), encoding="utf8") as event_file:
event = json.load(event_file)
scheduled = decision.decide(event)
assert list(scheduled.keys()) == expected
|
mpl-2.0
| -2,762,706,756,789,860,400
| 39.574394
| 114
| 0.647109
| false
|
timorieber/wagtail
|
wagtail/contrib/routable_page/tests.py
|
2
|
12697
|
from unittest import mock
from django.test import RequestFactory, TestCase
from django.test.utils import override_settings
from django.urls.exceptions import NoReverseMatch
from wagtail.contrib.routable_page.templatetags.wagtailroutablepage_tags import routablepageurl
from wagtail.core.models import Page, Site
from wagtail.tests.routablepage.models import (
RoutablePageTest, RoutablePageWithOverriddenIndexRouteTest)
class TestRoutablePage(TestCase):
model = RoutablePageTest
def setUp(self):
self.home_page = Page.objects.get(id=2)
self.routable_page = self.home_page.add_child(instance=self.model(
title="Routable Page",
live=True,
))
def test_resolve_index_route_view(self):
view, args, kwargs = self.routable_page.resolve_subpage('/')
self.assertEqual(view, self.routable_page.index_route)
self.assertEqual(args, ())
self.assertEqual(kwargs, {})
def test_resolve_archive_by_year_view(self):
view, args, kwargs = self.routable_page.resolve_subpage('/archive/year/2014/')
self.assertEqual(view, self.routable_page.archive_by_year)
self.assertEqual(args, ('2014', ))
self.assertEqual(kwargs, {})
def test_resolve_archive_by_author_view(self):
view, args, kwargs = self.routable_page.resolve_subpage('/archive/author/joe-bloggs/')
self.assertEqual(view, self.routable_page.archive_by_author)
self.assertEqual(args, ())
self.assertEqual(kwargs, {'author_slug': 'joe-bloggs'})
def test_resolve_external_view(self):
view, args, kwargs = self.routable_page.resolve_subpage('/external/joe-bloggs/')
self.assertEqual(view, self.routable_page.external_view)
self.assertEqual(args, ('joe-bloggs', ))
self.assertEqual(kwargs, {})
def test_resolve_external_view_other_route(self):
view, args, kwargs = self.routable_page.resolve_subpage('/external-no-arg/')
self.assertEqual(view, self.routable_page.external_view)
self.assertEqual(args, ())
self.assertEqual(kwargs, {})
def test_reverse_index_route_view(self):
url = self.routable_page.reverse_subpage('index_route')
self.assertEqual(url, '')
def test_reverse_archive_by_year_view(self):
url = self.routable_page.reverse_subpage('archive_by_year', args=('2014', ))
self.assertEqual(url, 'archive/year/2014/')
def test_reverse_archive_by_author_view(self):
url = self.routable_page.reverse_subpage('archive_by_author', kwargs={'author_slug': 'joe-bloggs'})
self.assertEqual(url, 'archive/author/joe-bloggs/')
def test_reverse_overridden_name(self):
url = self.routable_page.reverse_subpage('name_overridden')
self.assertEqual(url, 'override-name-test/')
def test_reverse_overridden_name_default_doesnt_work(self):
with self.assertRaises(NoReverseMatch):
self.routable_page.reverse_subpage('override_name_test')
def test_reverse_external_view(self):
url = self.routable_page.reverse_subpage('external_view', args=('joe-bloggs', ))
self.assertEqual(url, 'external/joe-bloggs/')
def test_reverse_external_view_other_route(self):
url = self.routable_page.reverse_subpage('external_view')
self.assertEqual(url, 'external-no-arg/')
def test_get_index_route_view(self):
response = self.client.get(self.routable_page.url)
self.assertContains(response, "DEFAULT PAGE TEMPLATE")
def test_get_routable_page_with_overridden_index_route(self):
page = self.home_page.add_child(
instance=RoutablePageWithOverriddenIndexRouteTest(
title="Routable Page with overridden index",
live=True
)
)
response = self.client.get(page.url)
self.assertContains(response, "OVERRIDDEN INDEX ROUTE")
self.assertNotContains(response, "DEFAULT PAGE TEMPLATE")
def test_get_archive_by_year_view(self):
response = self.client.get(self.routable_page.url + 'archive/year/2014/')
self.assertContains(response, "ARCHIVE BY YEAR: 2014")
def test_earlier_view_takes_precedence(self):
response = self.client.get(self.routable_page.url + 'archive/year/1984/')
self.assertContains(response, "we were always at war with eastasia")
def test_get_archive_by_author_view(self):
response = self.client.get(self.routable_page.url + 'archive/author/joe-bloggs/')
self.assertContains(response, "ARCHIVE BY AUTHOR: joe-bloggs")
def test_get_external_view(self):
response = self.client.get(self.routable_page.url + 'external/joe-bloggs/')
self.assertContains(response, "EXTERNAL VIEW: joe-bloggs")
def test_get_external_view_other_route(self):
response = self.client.get(self.routable_page.url + 'external-no-arg/')
self.assertContains(response, "EXTERNAL VIEW: ARG NOT SET")
def test_routable_page_can_have_instance_bound_descriptors(self):
# This descriptor pretends that it does not exist in the class, hence
# it raises an AttributeError when class bound. This is, for instance,
# the behavior of django's FileFields.
class InstanceDescriptor:
def __get__(self, instance, cls=None):
if instance is None:
raise AttributeError
return 'value'
def __set__(self, instance, value):
raise AttributeError
try:
RoutablePageTest.descriptor = InstanceDescriptor()
RoutablePageTest.get_subpage_urls()
finally:
del RoutablePageTest.descriptor
class TestRoutablePageTemplateTag(TestCase):
def setUp(self):
self.home_page = Page.objects.get(id=2)
self.routable_page = self.home_page.add_child(instance=RoutablePageTest(
title="Routable Page",
live=True,
))
self.rf = RequestFactory()
self.request = self.rf.get(self.routable_page.url)
self.context = {'request': self.request}
def test_templatetag_reverse_index_route(self):
url = routablepageurl(self.context, self.routable_page,
'index_route')
self.assertEqual(url, '/%s/' % self.routable_page.slug)
def test_templatetag_reverse_archive_by_year_view(self):
url = routablepageurl(self.context, self.routable_page,
'archive_by_year', '2014')
self.assertEqual(url, '/%s/archive/year/2014/' % self.routable_page.slug)
def test_templatetag_reverse_archive_by_author_view(self):
url = routablepageurl(self.context, self.routable_page,
'archive_by_author', author_slug='joe-bloggs')
self.assertEqual(url, '/%s/archive/author/joe-bloggs/' % self.routable_page.slug)
def test_templatetag_reverse_external_view(self):
url = routablepageurl(self.context, self.routable_page,
'external_view', 'joe-bloggs')
self.assertEqual(url, '/%s/external/joe-bloggs/' % self.routable_page.slug)
def test_templatetag_reverse_external_view_without_append_slash(self):
with mock.patch('wagtail.core.models.WAGTAIL_APPEND_SLASH', False):
url = routablepageurl(self.context, self.routable_page,
'external_view', 'joe-bloggs')
expected = '/' + self.routable_page.slug + '/' + 'external/joe-bloggs/'
self.assertEqual(url, expected)
@override_settings(ALLOWED_HOSTS=['testserver', 'localhost', 'development.local'])
class TestRoutablePageTemplateTagForSecondSiteAtSameRoot(TestCase):
"""
When multiple sites exist on the same root page, relative URLs within that subtree should
omit the domain, in line with #4390
"""
def setUp(self):
default_site = Site.objects.get(is_default_site=True)
second_site = Site.objects.create( # add another site with the same root page
hostname='development.local',
port=default_site.port,
root_page_id=default_site.root_page_id,
)
self.home_page = Page.objects.get(id=2)
self.routable_page = self.home_page.add_child(instance=RoutablePageTest(
title="Routable Page",
live=True,
))
self.rf = RequestFactory()
self.request = self.rf.get(self.routable_page.url)
self.context = {'request': self.request}
self.request.META['HTTP_HOST'] = second_site.hostname
self.request.META['SERVER_PORT'] = second_site.port
def test_templatetag_reverse_index_route(self):
url = routablepageurl(self.context, self.routable_page,
'index_route')
self.assertEqual(url, '/%s/' % self.routable_page.slug)
def test_templatetag_reverse_archive_by_year_view(self):
url = routablepageurl(self.context, self.routable_page,
'archive_by_year', '2014')
self.assertEqual(url, '/%s/archive/year/2014/' % self.routable_page.slug)
def test_templatetag_reverse_archive_by_author_view(self):
url = routablepageurl(self.context, self.routable_page,
'archive_by_author', author_slug='joe-bloggs')
self.assertEqual(url, '/%s/archive/author/joe-bloggs/' % self.routable_page.slug)
def test_templatetag_reverse_external_view(self):
url = routablepageurl(self.context, self.routable_page,
'external_view', 'joe-bloggs')
self.assertEqual(url, '/%s/external/joe-bloggs/' % self.routable_page.slug)
def test_templatetag_reverse_external_view_without_append_slash(self):
with mock.patch('wagtail.core.models.WAGTAIL_APPEND_SLASH', False):
url = routablepageurl(self.context, self.routable_page,
'external_view', 'joe-bloggs')
expected = '/' + self.routable_page.slug + '/' + 'external/joe-bloggs/'
self.assertEqual(url, expected)
@override_settings(ALLOWED_HOSTS=['testserver', 'localhost', 'events.local'])
class TestRoutablePageTemplateTagForSecondSiteAtDifferentRoot(TestCase):
"""
When multiple sites exist, relative URLs between such sites should include the domain portion
"""
def setUp(self):
self.home_page = Page.objects.get(id=2)
events_page = self.home_page.add_child(instance=Page(title='Events', live=True))
second_site = Site.objects.create(
hostname='events.local',
port=80,
root_page=events_page,
)
self.routable_page = self.home_page.add_child(instance=RoutablePageTest(
title="Routable Page",
live=True,
))
self.rf = RequestFactory()
self.request = self.rf.get(self.routable_page.url)
self.context = {'request': self.request}
self.request.META['HTTP_HOST'] = second_site.hostname
self.request.META['SERVER_PORT'] = second_site.port
def test_templatetag_reverse_index_route(self):
url = routablepageurl(self.context, self.routable_page,
'index_route')
self.assertEqual(url, 'http://localhost/%s/' % self.routable_page.slug)
def test_templatetag_reverse_archive_by_year_view(self):
url = routablepageurl(self.context, self.routable_page,
'archive_by_year', '2014')
self.assertEqual(url, 'http://localhost/%s/archive/year/2014/' % self.routable_page.slug)
def test_templatetag_reverse_archive_by_author_view(self):
url = routablepageurl(self.context, self.routable_page,
'archive_by_author', author_slug='joe-bloggs')
self.assertEqual(url, 'http://localhost/%s/archive/author/joe-bloggs/' % self.routable_page.slug)
def test_templatetag_reverse_external_view(self):
url = routablepageurl(self.context, self.routable_page,
'external_view', 'joe-bloggs')
self.assertEqual(url, 'http://localhost/%s/external/joe-bloggs/' % self.routable_page.slug)
def test_templatetag_reverse_external_view_without_append_slash(self):
with mock.patch('wagtail.core.models.WAGTAIL_APPEND_SLASH', False):
url = routablepageurl(self.context, self.routable_page,
'external_view', 'joe-bloggs')
expected = 'http://localhost/' + self.routable_page.slug + '/' + 'external/joe-bloggs/'
self.assertEqual(url, expected)
|
bsd-3-clause
| 3,030,710,525,895,143,400
| 39.565495
| 107
| 0.643695
| false
|
hguemar/cinder
|
cinder/tests/test_drbdmanagedrv.py
|
1
|
11637
|
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import mock
from oslo.utils import importutils
from oslo.utils import timeutils
from cinder import context
from cinder.openstack.common import log as logging
from cinder import test
from cinder.volume import configuration as conf
class mock_dbus():
def __init__(self):
pass
@staticmethod
def Array(defaults, signature=None):
return defaults
class mock_dm_utils():
@staticmethod
def dict_to_aux_props(x):
return x
class mock_dm_const():
TQ_GET_PATH = "get_path"
class mock_dm_exc():
DM_SUCCESS = 0
DM_EEXIST = 1
DM_ENOENT = 2
DM_ERROR = 1000
import sys
sys.modules['dbus'] = mock_dbus
sys.modules['drbdmanage'] = collections.namedtuple(
'module', ['consts', 'exceptions', 'utils'])
sys.modules['drbdmanage.utils'] = collections.namedtuple(
'module', ['dict_to_aux_props'])
sys.modules['drbdmanage.consts'] = collections.namedtuple(
'module', [])
sys.modules['drbdmanage.exceptions'] = collections.namedtuple(
'module', ['DM_EEXIST'])
from cinder.volume.drivers.drbdmanagedrv import DrbdManageDriver
LOG = logging.getLogger(__name__)
def create_configuration():
configuration = mock.MockObject(conf.Configuration)
configuration.san_is_local = False
configuration.append_config_values(mock.IgnoreArg())
return configuration
class DrbdManageFakeDriver():
resources = {}
def __init__(self):
self.calls = []
def list_resources(self, res, serial, prop, req):
self.calls.append(["list_resources", res, prop, req])
if 'cinder-id' in prop and prop['cinder-id'].startswith("deadbeef"):
return ([mock_dm_exc.DM_ENOENT, "none", []],
[])
else:
return ([[mock_dm_exc.DM_SUCCESS, "ACK", []]],
[("res", dict(prop))])
def create_resource(self, res, props):
self.calls.append(["create_resource", res, props])
return [[mock_dm_exc.DM_SUCCESS, "ack", []]]
def create_volume(self, res, size, props):
self.calls.append(["create_volume", res, size, props])
return [[mock_dm_exc.DM_SUCCESS, "ack", []]]
def auto_deploy(self, res, red, delta, site_clients):
self.calls.append(["auto_deploy", res, red, delta, site_clients])
return [[mock_dm_exc.DM_SUCCESS, "ack", []] * red]
def list_volumes(self, res, ser, prop, req):
self.calls.append(["list_volumes", res, ser, prop, req])
if 'cinder-id' in prop and prop['cinder-id'].startswith("deadbeef"):
return ([mock_dm_exc.DM_ENOENT, "none", []],
[])
else:
return ([[mock_dm_exc.DM_SUCCESS, "ACK", []]],
[("res", dict(), [(2, dict(prop))])
])
def remove_volume(self, res, nr, force):
self.calls.append(["remove_volume", res, nr, force])
return [[mock_dm_exc.DM_SUCCESS, "ack", []]]
def text_query(self, cmd):
self.calls.append(["text_query", cmd])
if cmd[0] == mock_dm_const.TQ_GET_PATH:
return ([(mock_dm_exc.DM_SUCCESS, "ack", [])], ['/dev/drbd0'])
return ([(mock_dm_exc.DM_ERROR, 'unknown command', [])], [])
def list_assignments(self, nodes, res, ser, prop, req):
self.calls.append(["list_assignments", nodes, res, ser, prop, req])
if 'cinder-id' in prop and prop['cinder-id'].startswith("deadbeef"):
return ([mock_dm_exc.DM_ENOENT, "none", []],
[])
else:
return ([[mock_dm_exc.DM_SUCCESS, "ACK", []]],
[("node", "res", dict(), [(2, dict(prop))])
])
def create_snapshot(self, res, snap, nodes, props):
self.calls.append(["create_snapshot", res, snap, nodes, props])
return [[mock_dm_exc.DM_SUCCESS, "ack", []]]
def list_snapshots(self, res, sn, prop, req):
self.calls.append(["list_snapshots", res, sn, prop, req])
if 'cinder-id' in prop and prop['cinder-id'].startswith("deadbeef"):
return ([mock_dm_exc.DM_ENOENT, "none", []],
[])
else:
return ([[mock_dm_exc.DM_SUCCESS, "ACK", []]],
[("res", [("snap", dict(prop))])
])
def remove_snapshot(self, res, snap, force):
self.calls.append(["remove_snapshot", res, snap, force])
return [[mock_dm_exc.DM_SUCCESS, "ack", []]]
def resize_volume(self, res, vol, ser, size, delta):
self.calls.append(["resize_volume", res, vol, ser, size, delta])
return [[mock_dm_exc.DM_SUCCESS, "ack", []]]
def restore_snapshot(self, res, snap, new, rprop, vprops):
self.calls.append(["restore_snapshot", res, snap, new, rprop, vprops])
return [[mock_dm_exc.DM_SUCCESS, "ack", []]]
class DrbdManageTestCase(test.TestCase):
def setUp(self):
self.ctxt = context.get_admin_context()
self._mock = mock.Mock()
self.configuration = mock.Mock(conf.Configuration)
self.configuration.san_is_local = True
self.configuration.reserved_percentage = 1
super(DrbdManageTestCase, self).setUp()
self.stubs.Set(importutils, 'import_object',
self.fake_import_object)
self.stubs.Set(DrbdManageDriver, 'call_or_reconnect',
self.fake_issue_dbus_call)
self.stubs.Set(DrbdManageDriver, 'dbus_connect',
self.fake_issue_dbus_connect)
sys.modules['cinder.volume.drivers.drbdmanagedrv'].dm_const \
= mock_dm_const
sys.modules['cinder.volume.drivers.drbdmanagedrv'].dm_utils \
= mock_dm_utils
sys.modules['cinder.volume.drivers.drbdmanagedrv'].dm_exc \
= mock_dm_exc
self.configuration.safe_get = lambda x: 'fake'
# Infrastructure
def fake_import_object(self, what, configuration, db, executor):
return None
def fake_issue_dbus_call(self, fn, *args):
        return fn(*args)
def fake_issue_dbus_connect(self):
self.odm = DrbdManageFakeDriver()
def call_or_reconnect(self, method, *params):
        return method(*params)
# Tests per se
def test_create_volume(self):
testvol = {'project_id': 'testprjid',
'name': 'testvol',
'size': 1,
'id': 'ba253fd0-8068-11e4-98c0-5254008ea111',
'volume_type_id': 'drbdmanage',
'created_at': timeutils.utcnow()}
dmd = DrbdManageDriver(configuration=self.configuration)
dmd.odm = DrbdManageFakeDriver()
dmd.create_volume(testvol)
self.assertEqual(dmd.odm.calls[0][0], "create_resource")
self.assertEqual(dmd.odm.calls[1][0], "create_volume")
self.assertEqual(dmd.odm.calls[1][2], 1048576)
self.assertEqual(dmd.odm.calls[2][0], "auto_deploy")
def test_delete_volume(self):
testvol = {'project_id': 'testprjid',
'name': 'testvol',
'size': 1,
'id': 'ba253fd0-8068-11e4-98c0-5254008ea111',
'volume_type_id': 'drbdmanage',
'created_at': timeutils.utcnow()}
dmd = DrbdManageDriver(configuration=self.configuration)
dmd.odm = DrbdManageFakeDriver()
dmd.delete_volume(testvol)
self.assertEqual(dmd.odm.calls[0][0], "list_volumes")
self.assertEqual(dmd.odm.calls[0][3]["cinder-id"], testvol['id'])
self.assertEqual(dmd.odm.calls[1][0], "remove_volume")
def test_local_path(self):
testvol = {'project_id': 'testprjid',
'name': 'testvol',
'size': 1,
'id': 'ba253fd0-8068-11e4-98c0-5254008ea111',
'volume_type_id': 'drbdmanage',
'created_at': timeutils.utcnow()}
dmd = DrbdManageDriver(configuration=self.configuration)
dmd.odm = DrbdManageFakeDriver()
data = dmd.local_path(testvol)
self.assertTrue(data.startswith("/dev/drbd"))
def test_create_snapshot(self):
testsnap = {'id': 'ca253fd0-8068-11e4-98c0-5254008ea111',
'volume_id': 'ba253fd0-8068-11e4-98c0-5254008ea111'}
dmd = DrbdManageDriver(configuration=self.configuration)
dmd.odm = DrbdManageFakeDriver()
dmd.create_snapshot(testsnap)
self.assertEqual(dmd.odm.calls[0][0], "list_volumes")
self.assertEqual(dmd.odm.calls[1][0], "list_assignments")
self.assertEqual(dmd.odm.calls[2][0], "create_snapshot")
self.assertTrue('node' in dmd.odm.calls[2][3])
def test_delete_snapshot(self):
testsnap = {'id': 'ca253fd0-8068-11e4-98c0-5254008ea111'}
dmd = DrbdManageDriver(configuration=self.configuration)
dmd.odm = DrbdManageFakeDriver()
dmd.delete_snapshot(testsnap)
self.assertEqual(dmd.odm.calls[0][0], "list_snapshots")
self.assertEqual(dmd.odm.calls[1][0], "remove_snapshot")
def test_extend_volume(self):
testvol = {'project_id': 'testprjid',
'name': 'testvol',
'size': 1,
'id': 'ba253fd0-8068-11e4-98c0-5254008ea111',
'volume_type_id': 'drbdmanage',
'created_at': timeutils.utcnow()}
dmd = DrbdManageDriver(configuration=self.configuration)
dmd.odm = DrbdManageFakeDriver()
dmd.extend_volume(testvol, 5)
self.assertEqual(dmd.odm.calls[0][0], "list_volumes")
self.assertEqual(dmd.odm.calls[0][3]["cinder-id"], testvol['id'])
self.assertEqual(dmd.odm.calls[1][0], "resize_volume")
self.assertEqual(dmd.odm.calls[1][1], "res")
self.assertEqual(dmd.odm.calls[1][2], 2)
self.assertEqual(dmd.odm.calls[1][3], -1)
self.assertEqual(dmd.odm.calls[1][4]['size'], 5242880)
def test_create_cloned_volume(self):
srcvol = {'project_id': 'testprjid',
'name': 'testvol',
'size': 1,
'id': 'ba253fd0-8068-11e4-98c0-5254008ea111',
'volume_type_id': 'drbdmanage',
'created_at': timeutils.utcnow()}
newvol = {'id': 'ca253fd0-8068-11e4-98c0-5254008ea111'}
dmd = DrbdManageDriver(configuration=self.configuration)
dmd.odm = DrbdManageFakeDriver()
dmd.create_cloned_volume(newvol, srcvol)
self.assertEqual(dmd.odm.calls[0][0], "list_volumes")
self.assertEqual(dmd.odm.calls[1][0], "list_assignments")
self.assertEqual(dmd.odm.calls[2][0], "create_snapshot")
self.assertEqual(dmd.odm.calls[3][0], "list_snapshots")
self.assertEqual(dmd.odm.calls[4][0], "restore_snapshot")
self.assertEqual(dmd.odm.calls[5][0], "list_snapshots")
        self.assertEqual(dmd.odm.calls[6][0], "remove_snapshot")
|
apache-2.0
| 8,359,004,270,935,753,000
| 35.942857
| 78
| 0.592679
| false
|
lliendo/Radar
|
radar/logger/__init__.py
|
1
|
2413
|
# -*- coding: utf-8 -*-
"""
This file is part of Radar.
Radar is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Radar is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
Lesser GNU General Public License for more details.
You should have received a copy of the Lesser GNU General Public License
along with Radar. If not, see <http://www.gnu.org/licenses/>.
Copyright 2015 Lucas Liendo.
"""
from logging import getLogger, Formatter, shutdown, INFO
from logging.handlers import RotatingFileHandler
from os.path import dirname
from os import mkdir
from errno import EEXIST
from sys import stderr
class LoggerError(Exception):
pass
class RadarLogger(object):
_shared_state = {'logger': None}
def __init__(self, path, logger_name='radar', max_size=100, rotations=5):
self.__dict__ = self._shared_state
self._create_dir(path)
self._shared_state['logger'] = self._configure_logger(path, logger_name, max_size * (1024 ** 2), rotations)
def _create_dir(self, path):
try:
mkdir(dirname(path))
except OSError as e:
if e.errno != EEXIST:
raise LoggerError('Error - Couldn\'t create directory : \'{:}\'. Details : {:}.'.format(path, e.strerror))
def _configure_logger(self, path, logger_name, max_size, rotations):
try:
logger = getLogger(logger_name)
logger.setLevel(INFO)
file_handler = RotatingFileHandler(path, maxBytes=max_size, backupCount=rotations)
file_handler.setFormatter(Formatter(fmt='%(asctime)s - %(message)s', datefmt='%b %d %H:%M:%S'))
logger.addHandler(file_handler)
except Exception as e:
raise LoggerError('Error - Couldn\'t configure Radar logger. Details : {:}.'.format(e))
return logger
@staticmethod
def log(message):
try:
RadarLogger._shared_state['logger'].info(message)
except Exception as e:
stderr.write('Error - Couldn\'t log to Radar logger. Details : {:}.'.format(e))
@staticmethod
def shutdown():
shutdown()
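# A minimal usage sketch; the log path below is an assumption, not part of
# Radar's shipped configuration. Instantiating RadarLogger once wires up the
# shared ('Borg') state, after which the static methods work from anywhere:
#
#     RadarLogger('/tmp/radar/radar.log')
#     RadarLogger.log('agent started')
#     RadarLogger.shutdown()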
|
lgpl-3.0
| -8,177,158,718,975,967,000
| 32.513889
| 122
| 0.665147
| false
|
nicfit/Clique
|
clique/app/identity.py
|
1
|
1960
|
# -*- coding: utf-8 -*-
import sys
import json
import argparse
import nicfit
from .. import Identity, IdentityChain
from .utils import prompt
from ..common import thumbprint, newJwk, jwkIsPrivate
@nicfit.command.register
class identity(nicfit.Command):
HELP = "Identity and stuffs"
def _initArgParser(self, parser):
parser.add_argument("-i", "--identity", default=None,
type=argparse.FileType('r'),
help="File containing an Identity in JSON format.")
parser.add_argument("-k", "--keyfile", default=None,
type=argparse.FileType('r'),
help="File containing a private JWK.")
parser.add_argument("--iss", default=None,
help="Identity issuer.")
def _run(self):
if self.args.identity:
ident = Identity.fromJson(json.loads(self.args.identity.read()))
else:
if self.args.keyfile:
try:
jwk = json.loads(self.args.keyfile.read())
key = newJwk(**jwk)
if not jwkIsPrivate(key):
raise ValueError(
"Key file does not contain a private key")
except Exception as ex:
print("Error loading key: " + str(ex), file=sys.stderr)
return 1
key._params["kid"] = thumbprint(key)
else:
key = Identity.generateKey()
iss = self.args.iss or prompt("iss? ")
ident = Identity(iss, key)
ident.idchain = IdentityChain.fromIdentity(ident,
ident.acct).serialize()
print(json.dumps(ident.toJson(private=True), indent=2, sort_keys=True))
idchain = IdentityChain.deserialize(ident.idchain)
print("\n## IdentityChain ##:\n" + str(idchain))
|
lgpl-3.0
| 3,020,030,026,125,875,700
| 35.981132
| 79
| 0.529082
| false
|
abramhindle/UnnaturalCodeFork
|
python/testdata/launchpad/lib/lp/registry/tests/test_distroseriesparent.py
|
1
|
9245
|
# Copyright 2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Tests for DistroSeriesParent model class."""
__metaclass__ = type
from testtools.matchers import MatchesStructure
from zope.component import getUtility
from zope.interface.verify import verifyObject
from zope.security.interfaces import Unauthorized
from lp.registry.interfaces.distroseriesparent import (
IDistroSeriesParent,
IDistroSeriesParentSet,
)
from lp.registry.interfaces.pocket import PackagePublishingPocket
from lp.soyuz.interfaces.component import IComponentSet
from lp.testing import (
login,
person_logged_in,
TestCaseWithFactory,
)
from lp.testing.layers import (
DatabaseFunctionalLayer,
ZopelessDatabaseLayer,
)
from lp.testing.sampledata import LAUNCHPAD_ADMIN
class TestDistroSeriesParent(TestCaseWithFactory):
"""Test the `DistroSeriesParent` model."""
layer = ZopelessDatabaseLayer
def test_verify_interface(self):
# Test the interface for the model.
dsp = self.factory.makeDistroSeriesParent()
verified = verifyObject(IDistroSeriesParent, dsp)
self.assertTrue(verified)
def test_properties(self):
# Test the model properties.
parent_series = self.factory.makeDistroSeries()
derived_series = self.factory.makeDistroSeries()
dsp = self.factory.makeDistroSeriesParent(
derived_series=derived_series,
parent_series=parent_series,
initialized=True)
self.assertThat(
dsp,
MatchesStructure.byEquality(
derived_series=derived_series,
parent_series=parent_series,
initialized=True,
is_overlay=False,
component=None,
pocket=None,
))
def test_properties_overlay(self):
# Test the model properties if the DSP represents an overlay.
parent_series = self.factory.makeDistroSeries()
derived_series = self.factory.makeDistroSeries()
universe_component = getUtility(IComponentSet).ensure('universe')
dsp = self.factory.makeDistroSeriesParent(
derived_series=derived_series,
parent_series=parent_series,
initialized=True,
is_overlay=True,
component=universe_component,
pocket=PackagePublishingPocket.SECURITY,
)
self.assertThat(
dsp,
MatchesStructure.byEquality(
derived_series=derived_series,
parent_series=parent_series,
initialized=True,
is_overlay=True,
component=universe_component,
pocket=PackagePublishingPocket.SECURITY,
))
def test_getByDerivedSeries(self):
parent_series = self.factory.makeDistroSeries()
derived_series = self.factory.makeDistroSeries()
self.factory.makeDistroSeriesParent(
derived_series, parent_series)
results = getUtility(IDistroSeriesParentSet).getByDerivedSeries(
derived_series)
self.assertEqual(1, results.count())
self.assertEqual(parent_series, results[0].parent_series)
# Making a second parent should add it to the results.
self.factory.makeDistroSeriesParent(
derived_series, self.factory.makeDistroSeries())
results = getUtility(IDistroSeriesParentSet).getByDerivedSeries(
derived_series)
self.assertEqual(2, results.count())
def test_getByParentSeries(self):
parent_series = self.factory.makeDistroSeries()
derived_series = self.factory.makeDistroSeries()
self.factory.makeDistroSeriesParent(
derived_series, parent_series)
results = getUtility(IDistroSeriesParentSet).getByParentSeries(
parent_series)
self.assertEqual(1, results.count())
self.assertEqual(derived_series, results[0].derived_series)
# Making a second child should add it to the results.
self.factory.makeDistroSeriesParent(
self.factory.makeDistroSeries(), parent_series)
results = getUtility(IDistroSeriesParentSet).getByParentSeries(
parent_series)
self.assertEqual(2, results.count())
class TestDistroSeriesParentSecurity(TestCaseWithFactory):
layer = DatabaseFunctionalLayer
def test_random_person_is_unauthorized(self):
dsp = self.factory.makeDistroSeriesParent()
person = self.factory.makePerson()
with person_logged_in(person):
self.assertRaises(
Unauthorized,
setattr, dsp, "derived_series", dsp.parent_series)
def assertCanEdit(self, dsp):
dsp.initialized = False
        self.assertEqual(False, dsp.initialized)
def test_distroseries_drivers_can_edit(self):
# Test that distroseries drivers can edit the data.
dsp = self.factory.makeDistroSeriesParent()
person = self.factory.makePerson()
login(LAUNCHPAD_ADMIN)
dsp.derived_series.driver = person
with person_logged_in(person):
self.assertCanEdit(dsp)
def test_admins_can_edit(self):
dsp = self.factory.makeDistroSeriesParent()
login(LAUNCHPAD_ADMIN)
self.assertCanEdit(dsp)
def test_distro_owners_can_edit(self):
dsp = self.factory.makeDistroSeriesParent()
person = self.factory.makePerson()
login(LAUNCHPAD_ADMIN)
dsp.derived_series.distribution.owner = person
with person_logged_in(person):
self.assertCanEdit(dsp)
class TestOverlayTree(TestCaseWithFactory):
"""Test the overlay tree."""
layer = DatabaseFunctionalLayer
def test_getFlattenedOverlayTree(self):
#
# series
# |
# ----------------------------------
# | | | |
# o o | o
# | | | |
# parent11 parent21 parent31 parent41
# | |
# o o
# | | type of relation:
# parent12 parent22 | |
# | | o
# | | |
# | no overlay overlay
# parent13
#
distroseries = self.factory.makeDistroSeries()
parent11 = self.factory.makeDistroSeries()
parent12 = self.factory.makeDistroSeries()
parent21 = self.factory.makeDistroSeries()
universe_component = getUtility(IComponentSet).ensure('universe')
# series -> parent11
dsp_series_parent11 = self.factory.makeDistroSeriesParent(
derived_series=distroseries, parent_series=parent11,
initialized=True, is_overlay=True,
pocket=PackagePublishingPocket.RELEASE,
component=universe_component)
# parent11 -> parent12
dsp_parent11_parent12 = self.factory.makeDistroSeriesParent(
derived_series=parent11, parent_series=parent12,
initialized=True, is_overlay=True,
pocket=PackagePublishingPocket.RELEASE,
component=universe_component)
# parent12 -> parent13
self.factory.makeDistroSeriesParent(derived_series=parent12,
initialized=True, is_overlay=False)
# series -> parent21
dsp_series_parent21 = self.factory.makeDistroSeriesParent(
derived_series=distroseries, parent_series=parent21,
initialized=True, is_overlay=True,
pocket=PackagePublishingPocket.RELEASE,
component=universe_component)
# parent21 -> parent22
dsp_parent21_parent22 = self.factory.makeDistroSeriesParent(
derived_series=parent21, initialized=True, is_overlay=True,
pocket=PackagePublishingPocket.RELEASE,
component=universe_component)
# series -> parent31
self.factory.makeDistroSeriesParent(derived_series=distroseries,
initialized=True, is_overlay=False)
# series -> parent41
dsp_series_parent41 = self.factory.makeDistroSeriesParent(
derived_series=distroseries, initialized=True, is_overlay=True,
pocket=PackagePublishingPocket.RELEASE,
component=universe_component)
overlays = getUtility(
IDistroSeriesParentSet).getFlattenedOverlayTree(distroseries)
self.assertContentEqual(
[dsp_series_parent11, dsp_parent11_parent12, dsp_series_parent21,
dsp_parent21_parent22, dsp_series_parent41],
overlays)
def test_getFlattenedOverlayTree_empty(self):
distroseries = self.factory.makeDistroSeries()
self.factory.makeDistroSeriesParent(derived_series=distroseries,
initialized=True, is_overlay=False)
overlays = getUtility(
IDistroSeriesParentSet).getFlattenedOverlayTree(distroseries)
self.assertTrue(overlays.is_empty())
|
agpl-3.0
| -6,653,642,816,786,933,000
| 38.008439
| 77
| 0.630611
| false
|
jacquerie/inspire-dojson
|
inspire_dojson/hep/rules/bd0xx.py
|
1
|
12752
|
# -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2014-2017 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this license, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""DoJSON rules for MARC fields in 0xx."""
from __future__ import absolute_import, division, print_function
import re
from collections import defaultdict
import pycountry
from dojson import utils
from idutils import is_arxiv_post_2007, is_doi, is_handle, normalize_doi
from inspire_schemas.api import load_schema
from inspire_schemas.utils import normalize_arxiv_category
from inspire_utils.helpers import force_list
from ..model import hep, hep2marc
from ...utils import force_single_element, normalize_isbn
RE_LANGUAGE = re.compile(r'\/| or | and |,|=|\s+')
@hep.over('isbns', '^020..')
@utils.for_each_value
def isbns(self, key, value):
"""Populate the ``isbns`` key."""
def _get_medium(value):
def _normalize(medium):
schema = load_schema('hep')
valid_media = schema['properties']['isbns']['items']['properties']['medium']['enum']
medium = medium.lower().replace('-', '').replace(' ', '')
if medium in valid_media:
return medium
elif medium == 'ebook':
return 'online'
elif medium == 'paperback':
return 'softcover'
return ''
medium = force_single_element(value.get('b', ''))
normalized_medium = _normalize(medium)
return normalized_medium
def _get_isbn(value):
a_value = force_single_element(value.get('a', ''))
normalized_a_value = a_value.replace('.', '')
if normalized_a_value:
return normalize_isbn(normalized_a_value)
return {
'medium': _get_medium(value),
'value': _get_isbn(value),
}
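# For a hypothetical field such as {'a': '978-3-16-148410-0', 'b': 'eBook'},
# the mapping above yields medium 'online' (via _normalize) and the ISBN as
# returned by ``normalize_isbn``; the exact formatting of the value depends
# on inspire_schemas.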
@hep2marc.over('020', 'isbns')
@utils.for_each_value
def isbns2marc(self, key, value):
"""Populate the ``020`` MARC field."""
return {
'a': value.get('value'),
'b': value.get('medium'),
}
@hep.over('dois', '^0247.')
def dois(self, key, value):
"""Populate the ``dois`` key.
Also populates the ``persistent_identifiers`` key through side effects.
"""
def _get_first_non_curator_source(sources):
sources_without_curator = [el for el in sources if el.upper() != 'CURATOR']
return force_single_element(sources_without_curator)
def _get_material(value):
MATERIAL_MAP = {
'ebook': 'publication',
}
q_value = force_single_element(value.get('q', ''))
normalized_q_value = q_value.lower()
return MATERIAL_MAP.get(normalized_q_value, normalized_q_value)
def _is_doi(id_, type_):
return (not type_ or type_.upper() == 'DOI') and is_doi(id_)
def _is_handle(id_, type_):
return (not type_ or type_.upper() == 'HDL') and is_handle(id_)
dois = self.get('dois', [])
persistent_identifiers = self.get('persistent_identifiers', [])
values = force_list(value)
for value in values:
id_ = force_single_element(value.get('a', ''))
material = _get_material(value)
schema = force_single_element(value.get('2', ''))
sources = force_list(value.get('9'))
source = _get_first_non_curator_source(sources)
if _is_doi(id_, schema):
dois.append({
'material': material,
'source': source,
'value': normalize_doi(id_),
})
else:
schema = 'HDL' if _is_handle(id_, schema) else schema
persistent_identifiers.append({
'material': material,
'schema': schema,
'source': source,
'value': id_,
})
self['persistent_identifiers'] = persistent_identifiers
return dois
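# Rough sketch, assuming a hypothetical MARC field: {'a': '10.1234/example',
# '2': 'DOI', '9': 'arXiv'} contributes {'material': '', 'source': 'arXiv',
# 'value': '10.1234/example'} to ``dois``, while a handle or other non-DOI
# identifier lands in ``persistent_identifiers`` instead.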
@hep2marc.over('0247', '^dois$')
@utils.for_each_value
def dois2marc(self, key, value):
"""Populate the ``0247`` MARC field."""
return {
'2': 'DOI',
'9': value.get('source'),
'a': value.get('value'),
'q': value.get('material'),
}
@hep2marc.over('0247', '^persistent_identifiers$')
@utils.for_each_value
def persistent_identifiers2marc(self, key, value):
"""Populate the ``0247`` MARC field."""
return {
'2': value.get('schema'),
'9': value.get('source'),
'a': value.get('value'),
'q': value.get('material'),
}
@hep.over('texkeys', '^035..')
def texkeys(self, key, value):
"""Populate the ``texkeys`` key.
Also populates the ``external_system_identifiers`` and ``_desy_bookkeeping`` keys through side effects.
"""
def _is_oai(id_, schema):
return id_.startswith('oai:')
def _is_desy(id_, schema):
return id_ and schema in ('DESY',)
def _is_texkey(id_, schema):
return id_ and schema in ('INSPIRETeX', 'SPIRESTeX')
texkeys = self.get('texkeys', [])
external_system_identifiers = self.get('external_system_identifiers', [])
_desy_bookkeeping = self.get('_desy_bookkeeping', [])
values = force_list(value)
for value in values:
ids = force_list(value.get('a', ''))
other_ids = force_list(value.get('z', ''))
schema = force_single_element(value.get('9', ''))
for id_ in ids:
id_ = id_.strip()
if not id_:
continue
if _is_texkey(id_, schema):
texkeys.insert(0, id_)
elif _is_oai(id_, schema):
continue # XXX: ignored.
elif _is_desy(id_, schema):
_desy_bookkeeping.append({'identifier': id_})
else:
external_system_identifiers.insert(0, {
'schema': schema,
'value': id_,
})
for id_ in other_ids:
id_ = id_.strip()
if not id_:
continue
if _is_texkey(id_, schema):
texkeys.append(id_)
elif _is_oai(id_, schema):
continue # XXX: ignored.
elif _is_desy(id_, schema):
_desy_bookkeeping.append({'identifier': id_})
else:
external_system_identifiers.append({
'schema': schema,
'value': id_,
})
self['external_system_identifiers'] = external_system_identifiers
self['_desy_bookkeeping'] = _desy_bookkeeping
return texkeys
@hep2marc.over('035', '^texkeys$')
def texkeys2marc(self, key, value):
"""Populate the ``035`` MARC field."""
result = []
values = force_list(value)
if values:
value = values[0]
result.append({
'9': 'INSPIRETeX',
'a': value,
})
for value in values[1:]:
result.append({
'9': 'INSPIRETeX',
'z': value,
})
return result
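# For example, hypothetical texkeys ['Smith:2016abc', 'Smith:2016xyz'] map to
# [{'9': 'INSPIRETeX', 'a': 'Smith:2016abc'},
#  {'9': 'INSPIRETeX', 'z': 'Smith:2016xyz'}]: the first key becomes the
# primary ``a`` subfield and the rest become ``z`` subfields.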
@hep2marc.over('035', '^external_system_identifiers$')
def external_system_identifiers2marc(self, key, value):
"""Populate the ``035`` MARC field.
Also populates the ``970`` MARC field through side effects and an extra
``id_dict`` dictionary that holds potentially duplicate IDs that are
post-processed in a filter.
"""
def _is_scheme_cernkey(id_, schema):
return schema == 'CERNKEY'
def _is_scheme_spires(id_, schema):
return schema == 'SPIRES'
result_035 = self.get('035', [])
id_dict = self.get('id_dict', defaultdict(list))
result_970 = self.get('970', [])
values = force_list(value)
for value in values:
id_ = value.get('value')
schema = value.get('schema')
if _is_scheme_spires(id_, schema):
result_970.append({
'a': id_,
})
elif _is_scheme_cernkey(id_, schema):
result_035.append({
'9': 'CERNKEY',
'z': id_,
})
else:
id_dict[schema].append(id_)
self['970'] = result_970
self['id_dict'] = id_dict
return result_035
@hep.over('arxiv_eprints', '^037..')
def arxiv_eprints(self, key, value):
"""Populate the ``arxiv_eprints`` key.
Also populates the ``report_numbers`` key through side effects.
"""
def _get_clean_arxiv_eprint(id_):
return id_.split(':')[-1]
def _is_arxiv_eprint(id_, source):
return source == 'arXiv'
def _is_hidden_report_number(other_id, source):
return other_id
def _get_clean_source(source):
if source == 'arXiv:reportnumber':
return 'arXiv'
return source
arxiv_eprints = self.get('arxiv_eprints', [])
report_numbers = self.get('report_numbers', [])
values = force_list(value)
for value in values:
id_ = force_single_element(value.get('a', ''))
other_id = force_single_element(value.get('z', ''))
categories = [normalize_arxiv_category(category) for category
in force_list(value.get('c'))]
source = force_single_element(value.get('9', ''))
if _is_arxiv_eprint(id_, source):
arxiv_eprints.append({
'categories': categories,
'value': _get_clean_arxiv_eprint(id_),
})
elif _is_hidden_report_number(other_id, source):
report_numbers.append({
'hidden': True,
'source': _get_clean_source(source),
'value': other_id,
})
else:
report_numbers.append({
'source': _get_clean_source(source),
'value': id_,
})
self['report_numbers'] = report_numbers
return arxiv_eprints
@hep2marc.over('037', '^arxiv_eprints$')
def arxiv_eprints2marc(self, key, values):
"""Populate the ``037`` MARC field.
Also populates the ``035`` and the ``65017`` MARC fields through side effects.
"""
result_037 = self.get('037', [])
result_035 = self.get('035', [])
result_65017 = self.get('65017', [])
for value in values:
arxiv_id = value.get('value')
arxiv_id = 'arXiv:' + arxiv_id if is_arxiv_post_2007(arxiv_id) else arxiv_id
result_037.append({
'9': 'arXiv',
'a': arxiv_id,
'c': force_single_element(value.get('categories')),
})
result_035.append({
'9': 'arXiv',
'a': 'oai:arXiv.org:' + value.get('value'),
})
categories = force_list(value.get('categories'))
for category in categories:
result_65017.append({
'2': 'arXiv',
'a': category,
})
self['65017'] = result_65017
self['035'] = result_035
return result_037
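# For example, a hypothetical eprint {'value': '1607.06746', 'categories':
# ['hep-th']} becomes {'9': 'arXiv', 'a': 'arXiv:1607.06746', 'c': 'hep-th'}
# in 037 (the 'arXiv:' prefix is added for post-2007 identifiers), plus an
# 035 OAI identifier and one 65017 entry per category.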
@hep2marc.over('037', '^report_numbers$')
@utils.for_each_value
def report_numbers2marc(self, key, value):
"""Populate the ``037`` MARC field."""
def _get_mangled_source(source):
if source == 'arXiv':
return 'arXiv:reportnumber'
return source
source = _get_mangled_source(value.get('source'))
if value.get('hidden'):
return {
'9': source,
'z': value.get('value'),
}
return {
'9': source,
'a': value.get('value'),
}
@hep.over('languages', '^041..')
def languages(self, key, value):
"""Populate the ``languages`` key."""
languages = self.get('languages', [])
values = force_list(value.get('a'))
for value in values:
for language in RE_LANGUAGE.split(value):
try:
name = language.strip().capitalize()
languages.append(pycountry.languages.get(name=name).alpha_2)
except KeyError:
pass
return languages
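# For example, a hypothetical field {'a': 'English/French'} is split on the
# separators in RE_LANGUAGE and resolved through pycountry, extending
# ``languages`` with ['en', 'fr']; names pycountry cannot resolve raise
# KeyError and are skipped.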
@hep2marc.over('041', '^languages$')
@utils.for_each_value
def languages2marc(self, key, value):
"""Populate the ``041`` MARC field."""
return {'a': pycountry.languages.get(alpha_2=value).name.lower()}
|
gpl-3.0
| -6,248,574,016,785,603,000
| 28.587007
| 107
| 0.559912
| false
|
benhunter/py-stuff
|
misc/csgo-stats.py
|
1
|
12345
|
# https://old.reddit.com/r/GlobalOffensive/comments/8mjqgc/i_made_a_python_script_that_generates_stats_using/
# https://pastebin.com/LLpym05c
import datetime
import matplotlib.pyplot as plt
def min_to_sec(line):  # converts a duration string 'MMM:SS' (minutes:seconds) to total seconds
seconds = 0
seconds += (int(line[-1]))
seconds += (int(line[-2])) * 10
seconds += (int(line[-4])) * 60
if line[-5].isdigit():
seconds += (int(line[-5])) * 600
if line[-6].isdigit():
seconds += (int(line[-6])) * 6000
return seconds
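# Worked example: min_to_sec('123:45') == 7425, i.e. 123 * 60 + 45. Digits
# left of the colon are weighted 6000/600/60, digits right of it 10/1.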
def create_plot(entries, plottitle, xaxlabel, filelabel, res, kdinput):  # builds a histogram of entries, styles the title/axes/grid, and saves it as filelabel
if kdinput:
plt.hist(entries, bins=(int(max(entries) * res)))
else:
plt.hist(entries, bins=range(min(entries), max(entries) + 1, 1))
plt.title(plottitle)
if kdinput:
plt.xticks(range(0, int(max(entries))))
plt.xlabel(xaxlabel)
plt.ylabel('Occurrences')
ax = plt.gca()
ax.set_axisbelow(True)
ax.grid(color='b', linestyle=':', alpha=0.3, linewidth=1)
xleft, xright = ax.get_xlim()
ybottom, ytop = ax.get_ylim()
ax.set_aspect(abs((xright - xleft) / (ybottom - ytop)) * 0.4)
plt.savefig(filelabel, dpi=300)
plt.clf()
filename = input("Input File Name (e.g. stats.txt or stats.htm): ")
steamid = input("Your Steam ID: ")
# splits file into list of individual HTML element strings
with open(filename, encoding="utf8") as f:
    file = f.read().split('<')
stats = [] # contains lists of individual games
# Format: ['MAP', [D, M, Y], Q LENGTH, GAME LENGTH, GAME SCORE,[PING, K, A, D, MVP, HSP, Score]]
current_game = [0] * 6 # temporarily holds current game data
begin = False # for parsing through beginning of document
for i, line in enumerate(file):
line = line.strip()
if 'td>\n' in line: # game info lines begin with <td>\n for some reason
if 'Competitive' in line[10:]:
begin = True # begin storing document data here
current_game[0] = line[22:]
if line[10:12] == '20':
year = line[10:14]
month = line[15:17]
day = line[18:20]
current_game[1] = list(map(int, [day, month, year]))
if 'Wait Time:' in line[10:]:
current_game[2] = min_to_sec(line)
if 'Match Duration:' in line[10:]:
current_game[3] = min_to_sec(line)
# stores personal game data as list
if begin and line[0:7] == 'a class' and steamid in line:
ping = file[i + 4][3:]
k = file[i + 6][3:]
a = file[i + 8][3:]
d = file[i + 10][3:]
# had to do this because single MVPs don't contain the number '1' by the star
mvp = -1 # if MVP entry is empty
if file[i + 12][-2] == '>':
mvp = 1
else:
for j, char in enumerate(file[i + 12]):
if char.isdigit():
mvp = file[i + 12][j:]
break
# had to do this because some HSP entries are empty
hsp = -1 # if HSP entry is empty
if file[i + 14][-2].isdigit():
hsp = file[i + 14][3:len(file[i + 14]) - 1]
score = file[i + 16][3:]
        # stores performance data (list of ints) as the sixth element of the game list
current_game[5] = list(map(int, [ping, k, a, d, mvp, hsp, score]))
    # gets the match score and stores it as a list of 2 ints (your score first)
if 'csgo_scoreboard_score' in line:
match_score = line[45:].split(' : ')
if not isinstance(current_game[5], list):
match_score.reverse()
current_game[4] = list(map(int, match_score))
if isinstance(current_game[4], list) and isinstance(current_game[5],
list): # individual game lists contain 6 entries
stats.append(current_game)
current_game = [0] * 6 # clears list before recording next game's info
current_game[3] = 1800 # 30 minute placeholder
# declaration of stat variables
total_kills = 0
total_deaths = 0
total_assists = 0
total_MVPs = 0
total_rounds_w = 0
total_rounds_l = 0
max_match_length = 0
min_match_length = 5400
win_streak = 0
loss_streak = 0
tie_streak = 0
max_win_streak = 0
max_loss_streak = 0
max_tie_streak = 0
total_score = 0
hsp = [] # list containing all hsps
mvp = [] # list containing all mvps
map_plays = {} # dict containing maps (keys) and plays (vals)
# initializing output file
output = open('output.txt', 'w')
stats.reverse()
# looping through every 'stats' entry (game lists)
for i, stat in enumerate(stats):
# writing a list of every match to the output file
output.write('\n' + str(i) + ': ' + repr(stat))
# summing K, D, A, MVP
total_kills += stat[5][1]
total_deaths += stat[5][3]
total_assists += stat[5][2]
total_MVPs += stat[5][4]
total_rounds_w += stat[4][0]
total_rounds_l += stat[4][1]
total_score += stat[5][6]
# creating list of Headshot Percentages (-1 excluded because -1 means no entry was listed)
if stat[5][5] >= 0:
hsp.append(stat[5][5])
# creating list of MVPs (-1 excluded because -1 means no entry was listed)
if stat[5][4] >= 0:
mvp.append(stat[5][4])
# finding the longest match
if stat[3] > max_match_length:
max_match_length = stat[3]
max_match_index = i
if stat[3] < min_match_length:
min_match_length = stat[3]
min_match_index = i
# builds dictionary containing maps and number of times map has been played
if stat[0] not in map_plays:
map_plays[stat[0]] = 1
else:
map_plays[stat[0]] += 1
###########################################################################
# convoluted way of calculating win/tie/loss streaks:
if stat[4][0] > stat[4][1]:
win_streak += 1
loss_streak, tie_streak = 0, 0
elif stat[4][0] == stat[4][1]:
tie_streak += 1
win_streak, loss_streak = 0, 0
else:
loss_streak += 1
win_streak, tie_streak = 0, 0
if win_streak > max_win_streak:
max_win_streak = win_streak
max_win_index = i
if tie_streak > max_tie_streak:
max_tie_streak = tie_streak
max_tie_index = i
if loss_streak > max_loss_streak:
max_loss_streak = loss_streak
max_loss_index = i
################################################################################
# writing output to output.txt file
output.write('\nFormat: [\'MAP\', [D, M, Y], QUEUE LENGTH, GAME LENGTH, GAME SCORE, [PING, K, A, D, MVP, HSP, Score]]')
output.write('\n\nSTATS----------------------------------------------------------------\n')
output.write('{:<20} {:>7}'.format('\nTotal Kills:', total_kills))
output.write('{:<20} {:>7}'.format('\nTotal Deaths:', total_deaths))
output.write('{:<20} {:>7}'.format('\nTotal Assists:', total_assists))
output.write('{:<20} {:>7}'.format('\nTotal MVPs:', total_MVPs))
kdr = round(total_kills / total_deaths, 3)
output.write('{:<20} {:>7}'.format('\nK/D:', kdr))
output.write('\n')
output.write('{:<20} {:>7}'.format('\nTotal Rounds Won:', total_rounds_w))
output.write('{:<20} {:>7}'.format('\nTotal Rounds Lost:', total_rounds_l))
output.write('\n\nAverages (per game):')
output.write('\n\t{:<15} {:>8}'.format('K:', round(total_kills / len(stats), 2)))
output.write('\n\t{:<15} {:>8}'.format('D:', round(total_deaths / len(stats), 2)))
output.write('\n\t{:<15} {:>8}'.format('A:', round(total_assists / len(stats), 2)))
output.write('\n\t{:<15} {:>8}'.format('MVP:', round(total_MVPs / len(stats), 2)))
output.write('\n\t{:<15} {:>8}'.format('Score:', round(total_score / len(stats), 2)))
avg_rounds_won = round(total_rounds_w / len(stats), 1)
avg_rounds_lost = round(total_rounds_l / len(stats), 1)
output.write('\n\t{:<10} {} : {}'.format('Match (W:L):', avg_rounds_won, avg_rounds_lost))
total_rounds = total_rounds_l + total_rounds_w
output.write('\n\nAverages (per round):')
output.write('\n\t{:<15} {:>8}'.format('K:', round(total_kills / total_rounds, 2)))
output.write('\n\t{:<15} {:>8}'.format('D:', round(total_deaths / total_rounds, 2)))
output.write('\n\t{:<15} {:>8}'.format('A:', round(total_assists / total_rounds, 2)))
output.write('\n\t{:<15} {:>8}'.format('MVP:', round(total_MVPs / total_rounds, 2)))
output.write('\n\nHSP:')
output.write('\n\t{:<10} {:>8}%'.format('Max:', round(max(hsp), 2)))
output.write('\n\t{:<10} {:>8}%'.format('Min:', round(min(hsp), 2)))
output.write('\n\t{:<10} {:>8}%'.format('Avg:', round(sum(hsp) / len(hsp), 1)))
output.write(
'\n\nLongest Match:\t\t{}\t\t(game #{})'.format(datetime.timedelta(seconds=max_match_length), max_match_index))
output.write(
'\nShortest Match:\t\t{}\t\t(game #{})'.format(datetime.timedelta(seconds=min_match_length), min_match_index))
output.write(
'\nMax Win Streak: \t{}\t\t(from game #{} to #{})'.format(max_win_streak, max_win_index - max_win_streak + 1,
max_win_index))
output.write(
'\nMax Tie Streak: \t{}\t\t(from game #{} to #{})'.format(max_tie_streak, max_tie_index - max_tie_streak + 1,
max_tie_index))
output.write(
'\nMax Loss Streak: \t{}\t\t(from game #{} to #{})'.format(max_loss_streak, max_loss_index - max_loss_streak + 1,
max_loss_index))
output.write('\n\nMap Plays:')
for entry in sorted(map_plays, key=map_plays.get, reverse=True):
output.write('\n\t{:<12} {:>12}'.format(entry, map_plays[entry]))
print('\'output.txt\' can be found in the same directory as this script')
output.close()
#####################################################################
# graphing and graphing calculations done below
# lists containing raw vals for each stat
kd = []
kills = []
deaths = []
assists = []
mvps = []
hsps = []
rw = [] # rounds won
rl = []
games_played = {}
for stat in stats:
# collects vals from each game
kills.append(stat[5][1])
deaths.append(stat[5][3])
assists.append(stat[5][2])
if stat[5][4] == -1:
mvps.append(0)
else:
mvps.append(stat[5][4])
if stat[5][5] == -1:
hsps.append(0)
else:
hsps.append(stat[5][5])
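# avoid division by zero: deathless games are recorded as K/D = 1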
if stat[5][3] > 0:
kd.append(stat[5][1] / stat[5][3])
else:
kd.append(1)
if stat[4][0] < 15:
rw.append(stat[4][0])
if stat[4][1] < 15:
rl.append(stat[4][1])
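# key months as year*12 + month so they sort and plot chronologically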
if stat[1][2] * 12 + stat[1][1] not in games_played:
games_played[stat[1][2] * 12 + stat[1][1]] = 1
else:
games_played[stat[1][2] * 12 + stat[1][1]] += 1
plt.rc('font', size=8)
create_plot(kd, 'K/D Distribution', 'K/D (resolution: 0.05)', 'KD_Distribution.png', 20, True)
kd_trimmed = [x for x in kd if x <= 3]
create_plot(kd_trimmed, 'K/D Distribution (truncated at x = 3)', 'K/D (resolution: 0.01)',
'KD_Distribution (TRIMMED).png', 100, True)
create_plot(kills, 'Kill Distribution', 'Kills', 'Kill_Distribution.png', 0, False)
create_plot(deaths, 'Death Distribution', 'Deaths', 'Death_Distribution.png', 0, False)
create_plot(assists, 'Assist Distribution', 'Assists', 'Assist_Distribution.png', 0, False)
create_plot(mvps, 'MVP Distribution', 'MVPs', 'MVP_Distribution.png', 0, False)
create_plot(hsps, 'HSP Distribution', 'HSP', 'HSP_Distribution.png', 0, False)
create_plot(rw, 'Rounds Won Distribution (exc. 15, 16)', 'Rounds', 'RW_Distribution.png', 0, False)
create_plot(rl, 'Rounds Lost Distribution (exc. 15, 16)', 'Rounds', 'RL_Distribution.png', 0, False)
# graphing games played
games_played_x = []
games_played_y = []
for entry in sorted(games_played):
games_played_x.append(entry - 1)
games_played_y.append(games_played[entry])
games_played_x_string = []
for entry in games_played_x:
year = int(entry / 12)
month = (entry % 12) + 1
monthyear = str(month) + '-' + str(year)
games_played_x_string.append(monthyear)
plt.bar(games_played_x, games_played_y)
plt.title('Games Played Per Month')
plt.xlabel('Month')
plt.ylabel('Occurrences')
plt.xticks(games_played_x[::4], games_played_x_string[::4], rotation='45')
plt.savefig('Games_Played.png', dpi=300)
plt.clf()
print('output images can be found in the same directory as this script')
|
mit
| 500,048,332,103,682,240
| 35.202346
| 119
| 0.580154
| false
|
kgiusti/gofer
|
src/gofer/messaging/consumer.py
|
1
|
4280
|
# Copyright (c) 2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
from time import sleep
from logging import getLogger
from gofer.common import Thread, released
from gofer.messaging.model import DocumentError
from gofer.messaging.adapter.model import Reader
log = getLogger(__name__)
class ConsumerThread(Thread):
"""
An AMQP (abstract) consumer.
"""
def __init__(self, node, url, wait=3):
"""
:param node: An AMQP queue.
:type node: gofer.messaging.adapter.model.Node
:param url: The broker URL.
:type url: str
:param wait: Number of seconds to wait for a message.
:type wait: int
"""
Thread.__init__(self, name=node.name)
self.url = url
self.node = node
self.wait = wait
self.authenticator = None
self.reader = None
self.setDaemon(True)
def shutdown(self):
"""
Shutdown the consumer.
"""
self.abort()
@released
def run(self):
"""
Main consumer loop.
"""
self.reader = Reader(self.node, self.url)
self.reader.authenticator = self.authenticator
self.open()
try:
while not Thread.aborted():
self.read()
finally:
self.close()
def open(self):
"""
Open the reader.
"""
while not Thread.aborted():
try:
self.reader.open()
break
except Exception:
log.exception(self.getName())
sleep(30)
def close(self):
"""
Close the reader.
"""
try:
self.reader.close()
except Exception:
log.exception(self.getName())
def read(self):
"""
Read and process incoming documents.
"""
try:
wait = self.wait
reader = self.reader
message, document = reader.next(wait)
if message is None:
# wait expired
return
log.debug('{%s} read: %s', self.getName(), document)
self.dispatch(document)
message.ack()
except DocumentError as de:
self.rejected(de.code, de.description, de.document, de.details)
except Exception:
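# unexpected failure: back off, then recycle the reader connection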
log.exception(self.getName())
sleep(60)
self.close()
self.open()
def rejected(self, code, description, document, details):
"""
Called to process the received (invalid) document.
This method is intended to be overridden by subclasses.
:param code: The rejection code.
:type code: str
:param description: rejection description
:type description: str
:param document: The received *json* document.
:type document: str
:param details: The explanation.
:type details: str
"""
log.debug('rejected: %s', document)
def dispatch(self, document):
"""
Called to process the received document.
This method is intended to be overridden by subclasses.
:param document: The received *json* document.
:type document: str
"""
log.debug('dispatched: %s', document)
class Consumer(ConsumerThread):
"""
An AMQP consumer.
A thread used to consume messages from the specified queue.
On receipt, each message is used to build a document
that is passed to dispatch().
"""
def __init__(self, node, url=None):
"""
:param node: The AMQP node.
:type node: gofer.messaging.adapter.model.Node
:param url: The broker URL.
:type url: str
"""
super(Consumer, self).__init__(node, url)
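# A minimal usage sketch (illustrative only; the Node constructor arguments
# and broker URL below are assumptions, not part of this module):
#
#     class Echo(Consumer):
#         def dispatch(self, document):
#             log.info('received: %s', document)
#
#     consumer = Echo(Node('demo'), url='amqp://localhost')
#     consumer.start()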
|
lgpl-2.1
| 7,477,042,401,144,376,000
| 27.918919
| 75
| 0.573598
| false
|
pavel-paulau/perfrunner
|
perfrunner/tests/kv.py
|
1
|
12499
|
from logger import logger
from perfrunner.helpers.cbmonitor import timeit, with_stats
from perfrunner.helpers.worker import (
pillowfight_data_load_task,
pillowfight_task,
)
from perfrunner.tests import PerfTest
from perfrunner.workloads.pathoGen import PathoGen
from perfrunner.workloads.tcmalloc import WorkloadGen
class KVTest(PerfTest):
@with_stats
def access(self, *args):
super().access(*args)
def run(self):
self.load()
self.wait_for_persistence()
self.hot_load()
self.reset_kv_stats()
self.access()
self.report_kpi()
class ReadLatencyTest(KVTest):
"""Enable reporting of GET latency."""
COLLECTORS = {'latency': True}
def _report_kpi(self):
self.reporter.post(
*self.metrics.kv_latency(operation='get')
)
class MixedLatencyTest(ReadLatencyTest):
"""Enable reporting of GET and SET latency."""
def _report_kpi(self):
for operation in ('get', 'set'):
self.reporter.post(
*self.metrics.kv_latency(operation=operation)
)
class DGMTest(KVTest):
COLLECTORS = {'disk': True, 'net': False}
class DGMCompactionTest(DGMTest):
def run(self):
self.load()
self.wait_for_persistence()
self.hot_load()
self.reset_kv_stats()
self.compact_bucket(wait=False)
self.access()
self.report_kpi()
class DGMCompactedTest(DGMTest):
def run(self):
self.load()
self.wait_for_persistence()
self.compact_bucket()
self.hot_load()
self.reset_kv_stats()
self.access()
self.report_kpi()
class ReadLatencyDGMTest(KVTest):
COLLECTORS = {'disk': True, 'latency': True, 'net': False}
def _report_kpi(self):
self.reporter.post(
*self.metrics.kv_latency(operation='get')
)
class MixedLatencyDGMTest(ReadLatencyDGMTest):
def _report_kpi(self):
for operation in ('get', 'set'):
self.reporter.post(
*self.metrics.kv_latency(operation=operation)
)
class ReadLatencyDGMCompactionTest(DGMCompactionTest):
COLLECTORS = {'disk': True, 'latency': True, 'net': False}
def _report_kpi(self):
self.reporter.post(
*self.metrics.kv_latency(operation='get')
)
class ReadLatencyDGMCompactedTest(DGMCompactedTest):
COLLECTORS = {'disk': True, 'latency': True, 'net': False}
def _report_kpi(self):
for percentile in 99.9, 99.99:
self.reporter.post(
*self.metrics.kv_latency(operation='get', percentile=percentile)
)
class DurabilityTest(KVTest):
"""Enable reporting of persistTo=1 and replicateTo=1 latency."""
COLLECTORS = {'durability': True}
def _report_kpi(self):
for operation in ('replicate_to', 'persist_to'):
self.reporter.post(
*self.metrics.kv_latency(operation=operation,
collector='durability')
)
class SubDocTest(MixedLatencyTest):
"""Enable reporting of SubDoc latency."""
COLLECTORS = {'latency': True}
class XATTRTest(MixedLatencyTest):
"""Enable reporting of XATTR latency."""
COLLECTORS = {'latency': True}
def run(self):
self.load()
self.xattr_load()
self.wait_for_persistence()
self.access()
self.report_kpi()
class DrainTest(DGMCompactionTest):
"""Enable reporting of average disk write queue size."""
def _report_kpi(self):
self.reporter.post(
*self.metrics.avg_disk_write_queue()
)
class InitialLoadTest(DrainTest):
@with_stats
def load(self, *args, **kwargs):
super().load(*args, **kwargs)
def run(self):
self.load()
self.report_kpi()
class IngestionTest(KVTest):
COLLECTORS = {'disk': True, 'net': False}
@with_stats
def access(self, *args, **kwargs):
super(KVTest, self).access(*args, **kwargs)
self.wait_for_persistence()
def _report_kpi(self):
self.reporter.post(
*self.metrics.avg_total_queue_age()
)
class WarmupTest(PerfTest):
"""Measure the time it takes to perform cluster warm up."""
COLLECTORS = {'net': False}
@with_stats
def warmup(self):
self.remote.stop_server()
self.remote.drop_caches()
return self._warmup()
@timeit
def _warmup(self):
self.remote.start_server()
for master in self.cluster_spec.masters:
for bucket in self.test_config.buckets:
self.monitor.monitor_warmup(self.memcached, master, bucket)
def _report_kpi(self, time_elapsed):
self.reporter.post(
*self.metrics.elapsed_time(time_elapsed)
)
def run(self):
self.load()
self.wait_for_persistence()
self.access()
self.wait_for_persistence()
time_elapsed = self.warmup()
self.report_kpi(time_elapsed)
class FragmentationTest(PerfTest):
"""Implement the append-only workload.
Scenario:
1. Single node.
2. Load X items, 700-1400 bytes, average 1KB (11-22 fields).
3. Append data
3.1. Mark first 80% of items as working set.
3.2. Randomly update 75% of items in working set by adding 1 field at a time (62 bytes).
3.3. Mark first 40% of items as working set.
3.4. Randomly update 75% of items in working set by adding 1 field at a time (62 bytes).
3.5. Mark first 20% of items as working set.
3.6. Randomly update 75% of items in working set by adding 1 field at a time (62 bytes).
4. Repeat step #3 5 times.
See workloads/tcmalloc.py for details.
The scenario described above makes it possible to spot issues with
memory/allocator fragmentation.
"""
COLLECTORS = {'net': False}
@with_stats
def load_and_append(self):
password = self.test_config.bucket.password
WorkloadGen(self.test_config.load_settings.items,
self.master_node, self.test_config.buckets[0],
password).run()
def calc_fragmentation_ratio(self) -> float:
ratios = []
for target in self.target_iterator:
port = self.rest.get_memcached_port(target.node)
stats = self.memcached.get_stats(target.node, port, target.bucket,
stats='memory')
ratio = int(stats[b'mem_used']) / int(stats[b'total_heap_bytes'])
ratios.append(ratio)
ratio = 100 * (1 - sum(ratios) / len(ratios))
ratio = round(ratio, 1)
logger.info('Fragmentation: {}'.format(ratio))
return ratio
def _report_kpi(self):
ratio = self.calc_fragmentation_ratio()
self.reporter.post(
*self.metrics.fragmentation_ratio(ratio)
)
def run(self):
self.load_and_append()
self.report_kpi()
class FragmentationLargeTest(FragmentationTest):
@with_stats
def load_and_append(self):
password = self.test_config.bucket.password
WorkloadGen(self.test_config.load_settings.items,
self.master_node, self.test_config.buckets[0], password,
small=False).run()
class PathoGenTest(FragmentationTest):
@with_stats
def access(self, *args):
for target in self.target_iterator:
pg = PathoGen(num_items=self.test_config.load_settings.items,
num_workers=self.test_config.load_settings.workers,
num_iterations=self.test_config.load_settings.iterations,
frozen_mode=False,
host=target.node, port=8091,
bucket=target.bucket, password=target.password)
pg.run()
def _report_kpi(self):
self.reporter.post(
*self.metrics.avg_memcached_rss()
)
self.reporter.post(
*self.metrics.max_memcached_rss()
)
def run(self):
self.access()
self.report_kpi()
class PathoGenFrozenTest(PathoGenTest):
@with_stats
def access(self):
for target in self.target_iterator:
pg = PathoGen(num_items=self.test_config.load_settings.items,
num_workers=self.test_config.load_settings.workers,
num_iterations=self.test_config.load_settings.iterations,
frozen_mode=True,
host=target.node, port=8091,
bucket=target.bucket, password=target.password)
pg.run()
class ThroughputTest(KVTest):
def _measure_curr_ops(self) -> int:
ops = 0
for bucket in self.test_config.buckets:
for server in self.cluster_spec.servers:
port = self.rest.get_memcached_port(server)
stats = self.memcached.get_stats(server, port, bucket)
for stat in b'cmd_get', b'cmd_set':
ops += int(stats[stat])
return ops
def _report_kpi(self):
total_ops = self._measure_curr_ops()
self.reporter.post(
*self.metrics.kv_throughput(total_ops)
)
class EvictionTest(KVTest):
COLLECTORS = {'net': False}
def reset_kv_stats(self):
pass
def _measure_ejected_items(self) -> int:
ejected_items = 0
for bucket in self.test_config.buckets:
for hostname, _ in self.rest.get_node_stats(self.master_node,
bucket):
host = hostname.split(':')[0]
port = self.rest.get_memcached_port(host)
stats = self.memcached.get_stats(host, port, bucket)
ejected_items += int(stats[b'vb_active_auto_delete_count'])
ejected_items += int(stats[b'vb_pending_auto_delete_count'])
ejected_items += int(stats[b'vb_replica_auto_delete_count'])
return ejected_items
def _report_kpi(self):
ejected_items = self._measure_ejected_items()
self.reporter.post(
*self.metrics.kv_throughput(ejected_items)
)
class PillowFightTest(PerfTest):
"""Use cbc-pillowfight from libcouchbase to drive cluster."""
ALL_BUCKETS = True
def load(self, *args):
PerfTest.load(self, task=pillowfight_data_load_task)
@with_stats
def access(self, *args):
self.download_certificate()
PerfTest.access(self, task=pillowfight_task)
def _report_kpi(self, *args):
self.reporter.post(
*self.metrics.max_ops()
)
def run(self):
self.load()
self.wait_for_persistence()
self.access()
self.report_kpi()
class CompressionTest(PillowFightTest):
COLLECTORS = {'iostat': False, 'net': False}
@with_stats
@timeit
def wait_for_compression(self):
for master in self.cluster_spec.masters:
for bucket in self.test_config.buckets:
self.monitor.monitor_compression(self.memcached, master, bucket)
def _report_kpi(self, time_elapsed: float):
self.reporter.post(
*self.metrics.compression_throughput(time_elapsed)
)
def run(self):
self.load()
time_elapsed = self.wait_for_compression()
self.report_kpi(time_elapsed)
class CompactionTest(KVTest):
COLLECTORS = {'net': False}
@with_stats
@timeit
def compact(self):
self.compact_bucket()
def _report_kpi(self, time_elapsed):
self.reporter.post(
*self.metrics.elapsed_time(time_elapsed)
)
def run(self):
self.load()
self.wait_for_persistence()
self.hot_load()
self.access_bg()
time_elapsed = self.compact()
self.report_kpi(time_elapsed)
class MemoryOverheadTest(PillowFightTest):
COLLECTORS = {'iostat': False, 'net': False}
PF_KEY_SIZE = 20
def _report_kpi(self):
self.reporter.post(
*self.metrics.memory_overhead(key_size=self.PF_KEY_SIZE)
)
@with_stats
def access(self, *args):
self.sleep()
class CpuUtilizationTest(KVTest):
def _report_kpi(self, *args, **kwargs):
self.reporter.post(
*self.metrics.cpu_utilization()
)
|
apache-2.0
| -1,810,513,932,737,493,200
| 23.799603
| 96
| 0.587807
| false
|
voronovim/mikrotik-api-tools
|
api_tools/ini_parser.py
|
1
|
1045
|
import configparser
class Config(object):
def __init__(self):
self.config = configparser.ConfigParser()
self.config.read('../config.ini')
def get_general(self):
general = {'debug': self.config.get('general', 'debug')}
return general
def get_ftp(self):
ftp = {'host': self.config.get('ftp', 'host'),
'port': self.config.get('ftp', 'port'),
'username': self.config.get('ftp', 'username'),
'password': self.config.get('ftp', 'password')}
return ftp
def get_devices(self):
for section in self.config.sections():
if section != 'general' and section != 'ftp':
device = {'host': self.config.get(section, 'host'),
'username': self.config.get(section, 'username'),
'password': self.config.get(section, 'password'),
'dst-path': self.config.get(section, 'path')}
# Return generator
yield device
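# A minimal usage sketch (assumes a ../config.ini with the [general], [ftp]
# and per-device sections read above):
#
#     config = Config()
#     ftp = config.get_ftp()
#     for device in config.get_devices():
#         print(device['host'], device['dst-path'])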
|
mit
| -4,069,631,633,620,106,000
| 35.034483
| 75
| 0.522488
| false
|
gthank/patois
|
setup.py
|
1
|
1536
|
from __future__ import (print_function, absolute_import,
unicode_literals, division)
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import patois
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
with open('README.rst', 'r') as readme:
patois_long_description = readme.read()
with open('LICENSE', 'r') as license:
patois_license = license.read()
patois_classifiers = (
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: BSD',
'Operating System :: POSIX :: Linux',
'Operating System :: Microsoft :: Windows',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Utilities',
)
setup(
name='patois',
version=patois.__version__,
description='Python VM compatibility library',
long_description=patois_long_description,
author='Hank Gay',
author_email='hank@realultimateprogramming.com',
url="https://pypi.python.org/pypi/patois/",
py_modules=['patois',],
license=patois_license,
zip_safe=False,
classifiers=patois_classifiers,
)
|
mit
| 7,708,837,189,164,913,000
| 25.482759
| 56
| 0.64974
| false
|
OCA/manufacture
|
mrp_multi_level/tests/test_mrp_multi_level.py
|
1
|
15081
|
# Copyright 2018-21 ForgeFlow S.L. (https://www.forgeflow.com)
# (http://www.eficent.com)
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo.addons.mrp_multi_level.tests.common import TestMrpMultiLevelCommon
from odoo import fields
from datetime import date, datetime
class TestMrpMultiLevel(TestMrpMultiLevelCommon):
def test_01_mrp_levels(self):
"""Tests computation of MRP levels."""
self.assertEqual(self.fp_1.llc, 0)
self.assertEqual(self.fp_2.llc, 0)
self.assertEqual(self.sf_1.llc, 1)
self.assertEqual(self.sf_2.llc, 1)
self.assertEqual(self.pp_1.llc, 2)
self.assertEqual(self.pp_2.llc, 2)
def test_02_product_mrp_area(self):
"""Tests that mrp products are generated correctly."""
product_mrp_area = self.product_mrp_area_obj.search([
('product_id', '=', self.pp_1.id)])
self.assertEqual(product_mrp_area.supply_method, 'buy')
self.assertEqual(product_mrp_area.main_supplier_id, self.vendor)
self.assertEqual(product_mrp_area.qty_available, 10.0)
product_mrp_area = self.product_mrp_area_obj.search(
[("product_id", "=", self.sf_1.id)]
)
self.assertEqual(product_mrp_area.supply_method, "manufacture")
self.assertFalse(product_mrp_area.main_supplier_id)
self.assertFalse(product_mrp_area.main_supplierinfo_id)
def test_03_mrp_moves(self):
"""Tests for mrp moves generated."""
moves = self.mrp_move_obj.search([
('product_id', '=', self.pp_1.id),
])
self.assertEqual(len(moves), 3)
self.assertNotIn('s', moves.mapped('mrp_type'))
for move in moves:
self.assertTrue(move.planned_order_up_ids)
if move.planned_order_up_ids.product_mrp_area_id.product_id == \
self.fp_1:
# Demand coming from FP-1
self.assertEqual(
move.planned_order_up_ids.mrp_action, "manufacture")
self.assertEqual(move.mrp_qty, -200.0)
elif move.planned_order_up_ids.product_mrp_area_id.product_id == \
self.sf_1:
# Demand coming from FP-2 -> SF-1
self.assertEqual(
move.planned_order_up_ids.mrp_action, "manufacture")
if move.mrp_date == self.date_5:
self.assertEqual(move.mrp_qty, -90.0)
elif move.mrp_date == self.date_8:
self.assertEqual(move.mrp_qty, -72.0)
# Check actions:
planned_orders = self.planned_order_obj.search([
('product_id', '=', self.pp_1.id),
])
self.assertEqual(len(planned_orders), 3)
for plan in planned_orders:
self.assertEqual(plan.mrp_action, 'buy')
# Check PP-2 PO being accounted:
po_move = self.mrp_move_obj.search([
('product_id', '=', self.pp_2.id),
('mrp_type', '=', 's'),
])
self.assertEqual(len(po_move), 1)
self.assertEqual(po_move.purchase_order_id, self.po)
self.assertEqual(po_move.purchase_line_id, self.po.order_line)
def test_04_mrp_multi_level(self):
"""Tests MRP inventories created."""
# FP-1
fp_1_inventory_lines = self.mrp_inventory_obj.search(
[('product_mrp_area_id.product_id', '=', self.fp_1.id)])
self.assertEqual(len(fp_1_inventory_lines), 1)
self.assertEqual(fp_1_inventory_lines.date, self.date_7)
self.assertEqual(fp_1_inventory_lines.demand_qty, 100.0)
self.assertEqual(fp_1_inventory_lines.to_procure, 100.0)
# FP-2
fp_2_line_1 = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.fp_2.id),
('date', '=', self.date_7)])
self.assertEqual(len(fp_2_line_1), 1)
self.assertEqual(fp_2_line_1.demand_qty, 15.0)
self.assertEqual(fp_2_line_1.to_procure, 15.0)
# TODO: ask odoo to fix it... should be date10
fp_2_line_2 = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.fp_2.id),
('date', '=', self.date_9)])
self.assertEqual(len(fp_2_line_2), 1)
self.assertEqual(fp_2_line_2.demand_qty, 0.0)
self.assertEqual(fp_2_line_2.to_procure, 0.0)
self.assertEqual(fp_2_line_2.supply_qty, 12.0)
# SF-1
sf_1_line_1 = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.sf_1.id),
('date', '=', self.date_6)])
self.assertEqual(len(sf_1_line_1), 1)
self.assertEqual(sf_1_line_1.demand_qty, 30.0)
self.assertEqual(sf_1_line_1.to_procure, 30.0)
sf_1_line_2 = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.sf_1.id),
('date', '=', self.date_9)])
self.assertEqual(len(sf_1_line_2), 1)
self.assertEqual(sf_1_line_2.demand_qty, 24.0)
self.assertEqual(sf_1_line_2.to_procure, 24.0)
# SF-2
sf_2_line_1 = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.sf_2.id),
('date', '=', self.date_6)])
self.assertEqual(len(sf_2_line_1), 1)
self.assertEqual(sf_2_line_1.demand_qty, 45.0)
self.assertEqual(sf_2_line_1.to_procure, 30.0)
sf_2_line_2 = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.sf_2.id),
('date', '=', self.date_9)])
self.assertEqual(len(sf_2_line_2), 1)
self.assertEqual(sf_2_line_2.demand_qty, 36.0)
self.assertEqual(sf_2_line_2.to_procure, 36.0)
# PP-1
pp_1_line_1 = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.pp_1.id),
('date', '=', self.date_5)])
self.assertEqual(len(pp_1_line_1), 1)
self.assertEqual(pp_1_line_1.demand_qty, 290.0)
self.assertEqual(pp_1_line_1.to_procure, 280.0)
pp_1_line_2 = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.pp_1.id),
('date', '=', self.date_8)])
self.assertEqual(len(pp_1_line_2), 1)
self.assertEqual(pp_1_line_2.demand_qty, 72.0)
self.assertEqual(pp_1_line_2.to_procure, 72.0)
# PP-2
pp_2_line_1 = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.pp_2.id),
('date', '=', self.date_3)])
self.assertEqual(len(pp_2_line_1), 1)
self.assertEqual(pp_2_line_1.demand_qty, 90.0)
# 90.0 demand - 20.0 on hand - 5.0 on PO = 65.0
self.assertEqual(pp_2_line_1.to_procure, 65.0)
pp_2_line_2 = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.pp_2.id),
('date', '=', self.date_5)])
self.assertEqual(len(pp_2_line_2), 1)
self.assertEqual(pp_2_line_2.demand_qty, 360.0)
self.assertEqual(pp_2_line_2.to_procure, 360.0)
pp_2_line_3 = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.pp_2.id),
('date', '=', self.date_6)])
self.assertEqual(len(pp_2_line_3), 1)
self.assertEqual(pp_2_line_3.demand_qty, 108.0)
self.assertEqual(pp_2_line_3.to_procure, 108.0)
pp_2_line_4 = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.pp_2.id),
('date', '=', self.date_8)])
self.assertEqual(len(pp_2_line_4), 1)
self.assertEqual(pp_2_line_4.demand_qty, 48.0)
self.assertEqual(pp_2_line_4.to_procure, 48.0)
def test_05_planned_availability(self):
"""Test planned availability computation."""
# Running availability for PP-1:
invs = self.mrp_inventory_obj.search([
('product_id', '=', self.pp_1.id)],
order='date')
self.assertEqual(len(invs), 2)
expected = [0.0, 0.0] # No grouping, lot size nor safety stock.
self.assertEqual(invs.mapped('running_availability'), expected)
def test_06_procure_mo(self):
"""Test procurement wizard with MOs."""
mos = self.mo_obj.search([
('product_id', '=', self.fp_1.id)])
self.assertFalse(mos)
mrp_inv = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.fp_1.id)])
self.mrp_inventory_procure_wiz.with_context({
'active_model': 'mrp.inventory',
'active_ids': mrp_inv.ids,
'active_id': mrp_inv.id,
}).create({}).make_procurement()
mos = self.mo_obj.search([
('product_id', '=', self.fp_1.id)])
self.assertTrue(mos)
self.assertEqual(mos.product_qty, 100.0)
mo_date_start = fields.Date.to_date(mos.date_planned_start)
self.assertEqual(mo_date_start, self.date_5)
def test_07_adjust_qty_to_order(self):
"""Test the adjustments made to the qty to procure when minimum,
maximum order quantities and quantity multiple are set."""
# minimum order quantity:
mrp_inv_min = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.prod_min.id)])
self.assertEqual(mrp_inv_min.to_procure, 50.0)
# maximum order quantity:
mrp_inv_max = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.prod_max.id)])
self.assertEqual(mrp_inv_max.to_procure, 150)
plans = self.planned_order_obj.search([
('product_id', '=', self.prod_max.id),
])
self.assertEqual(len(plans), 2)
self.assertIn(100.0, plans.mapped('mrp_qty'))
self.assertIn(50.0, plans.mapped('mrp_qty'))
# quantity multiple:
mrp_inv_multiple = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.prod_multiple.id)])
self.assertEqual(mrp_inv_multiple.to_procure, 125)
def test_08_group_demand(self):
"""Test demand grouping functionality, `nbr_days`."""
pickings = self.stock_picking_obj.search([
('product_id', '=', self.prod_test.id),
('location_id', '=', self.sec_loc.id)])
self.assertEqual(len(pickings), 5)
moves = self.mrp_move_obj.search([
('product_id', '=', self.prod_test.id),
('mrp_area_id', '=', self.secondary_area.id),
])
supply_plans = self.planned_order_obj.search([
('product_id', '=', self.prod_test.id),
('mrp_area_id', '=', self.secondary_area.id),
])
moves_demand = moves.filtered(lambda m: m.mrp_type == 'd')
self.assertEqual(len(moves_demand), 5)
# two groups expected:
# 1. days 8, 9 and 10.
# 2. days 20, and 22.
self.assertEqual(len(supply_plans), 2)
quantities = supply_plans.mapped('mrp_qty')
week_1_expected = sum(moves_demand[0:3].mapped('mrp_qty'))
self.assertIn(abs(week_1_expected), quantities)
week_2_expected = sum(moves_demand[3:].mapped('mrp_qty'))
self.assertIn(abs(week_2_expected), quantities)
def test_09_isolated_mrp_area_run(self):
"""Test running MRP for just one area."""
self.mrp_multi_level_wiz.sudo(self.mrp_manager).create({
'mrp_area_ids': [(6, 0, self.secondary_area.ids)],
}).run_mrp_multi_level()
this = self.mrp_inventory_obj.search([
('mrp_area_id', '=', self.secondary_area.id)], limit=1)
self.assertTrue(this)
# Only recently executed areas should have been created by the test user:
self.assertEqual(this.create_uid, self.mrp_manager)
prev = self.mrp_inventory_obj.search([
('mrp_area_id', '!=', self.secondary_area.id)], limit=1)
self.assertNotEqual(this.create_uid, prev.create_uid)
def test_11_special_scenario_1(self):
"""When grouping demand supply and demand are in the same day but
supply goes first."""
moves = self.mrp_move_obj.search([
('product_id', '=', self.product_scenario_1.id)])
self.assertEqual(len(moves), 4)
mrp_invs = self.mrp_inventory_obj.search([
('product_id', '=', self.product_scenario_1.id)])
self.assertEqual(len(mrp_invs), 2)
# Net needs = 124 + 90 - 87 = 127 -> 130 (because of qty multiple)
self.assertEqual(mrp_invs[0].to_procure, 130)
# Net needs = 18, available on-hand = 3 -> 15
self.assertEqual(mrp_invs[1].to_procure, 15)
def test_bom_line_attribute_value_skip(self):
"""Check for the correct demand on components of a product with
multiple variants"""
# No demand or supply for AV-12 or AV-21
av_12_supply = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.av_12.id)
])
self.assertFalse(av_12_supply)
av_21_supply = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.av_21.id)
])
self.assertFalse(av_21_supply)
# Supply for AV-11 and AV-22
av_11_supply = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.av_11.id)
])
self.assertTrue(av_11_supply)
av_22_supply = self.mrp_inventory_obj.search([
('product_mrp_area_id.product_id', '=', self.av_22.id)
])
self.assertTrue(av_22_supply)
def test_13_timezone_handling(self):
self.calendar.tz = "Australia/Sydney" # Oct-Apr/Apr-Oct: UTC+11/UTC+10
date_move = datetime(2090, 4, 19, 20, 00)  # Apr 20, 6/7 am in Sydney
sydney_date = date(2090, 4, 20)
self._create_picking_in(
self.product_tz, 10.0, date_move, location=self.cases_loc
)
self.mrp_multi_level_wiz.create(
{"mrp_area_ids": [(6, 0, self.cases_area.ids)]}
).run_mrp_multi_level()
inventory = self.mrp_inventory_obj.search(
[
("mrp_area_id", "=", self.cases_area.id),
("product_id", "=", self.product_tz.id),
]
)
self.assertEqual(len(inventory), 1)
self.assertEqual(inventory.date, sydney_date)
def test_14_timezone_not_set(self):
self.wh.calendar_id = False
date_move = datetime(2090, 4, 19, 20, 00)
self._create_picking_in(
self.product_tz, 10.0, date_move, location=self.cases_loc
)
self.mrp_multi_level_wiz.create(
{"mrp_area_ids": [(6, 0, self.cases_area.ids)]}
).run_mrp_multi_level()
inventory = self.mrp_inventory_obj.search(
[
("mrp_area_id", "=", self.cases_area.id),
("product_id", "=", self.product_tz.id),
]
)
self.assertEqual(len(inventory), 1)
self.assertEqual(inventory.date, date_move.date())
|
agpl-3.0
| -5,775,510,911,736,473,000
| 44.7
| 79
| 0.571315
| false
|
ProjectQ-Framework/ProjectQ
|
projectq/setups/decompositions/cnot2cz.py
|
1
|
1282
|
# -*- coding: utf-8 -*-
# Copyright 2018 ProjectQ-Framework (www.projectq.ch)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Registers a decomposition for a CNOT gate in terms of CZ and Hadamard gates.
"""
from projectq.cengines import DecompositionRule
from projectq.meta import Compute, get_control_count, Uncompute
from projectq.ops import CZ, H, X
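# The rule below uses the identity CNOT = (I ⊗ H) · CZ · (I ⊗ H):
# conjugating the target qubit with Hadamards turns the phase flip of CZ
# into a bit flip on the target.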
def _decompose_cnot(cmd):
"""Decompose CNOT gates."""
ctrl = cmd.control_qubits
eng = cmd.engine
with Compute(eng):
H | cmd.qubits[0]
CZ | (ctrl[0], cmd.qubits[0][0])
Uncompute(eng)
def _recognize_cnot(cmd):
return get_control_count(cmd) == 1
#: Decomposition rules
all_defined_decomposition_rules = [DecompositionRule(X.__class__, _decompose_cnot, _recognize_cnot)]
|
apache-2.0
| 4,183,703,901,696,003,600
| 31.871795
| 100
| 0.711388
| false
|
Elico-Corp/odoo_OCB
|
addons/l10n_br/account.py
|
4
|
1811
|
# -*- encoding: utf-8 -*-
import openerp
from openerp.osv import fields, osv
TAX_DEFAULTS = {
'base_reduction': 0,
'amount_mva': 0,
'amount_type': 'percent',
}
class account_tax_template(osv.osv):
""" Add fields used to define some brazilian taxes """
_inherit = 'account.tax.template'
_columns = {
'tax_discount': fields.boolean('Discount this Tax in Price',
    help="Mark it for (ICMS, PIS, etc.)."),
'base_reduction': fields.float('Reduction', required=True,
    digits=0,
    help="A decimal percentage between 0 and 1."),
'amount_mva': fields.float('MVA Percent', required=True,
    digits=0,
    help="A decimal percentage between 0 and 1."),
}
_defaults = TAX_DEFAULTS
class account_tax(osv.osv):
""" Add fields used to define some brazilian taxes """
_inherit = 'account.tax'
_columns = {
'tax_discount': fields.boolean('Discount this Tax in Price',
    help="Mark it for (ICMS, PIS, etc.)."),
'base_reduction': fields.float('Reduction', required=True,
    digits=0,
    help="A decimal percentage between 0 and 1."),
'amount_mva': fields.float('MVA Percent', required=True,
    digits=0,
    help="A decimal percentage between 0 and 1."),
}
_defaults = TAX_DEFAULTS
|
agpl-3.0
| 5,387,798,143,675,682,000
| 41.116279
| 92
| 0.440641
| false
|
tomasdubec/openstack-cinder
|
cinder/tests/test_huawei.py
|
1
|
36222
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2012 Huawei Technologies Co., Ltd.
# Copyright (c) 2012 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for HUAWEI volume driver.
"""
from xml.etree import ElementTree as ET
from cinder import exception
from cinder.openstack.common import log as logging
from cinder import test
from cinder.volume import configuration as conf
from cinder.volume.drivers.huawei import huawei_iscsi
LOG = logging.getLogger(__name__)
FakeXML = """<?xml version="1.0" encoding="UTF-8" ?>
<config>
<Storage>
<ControllerIP0>10.10.10.1</ControllerIP0>
<ControllerIP1>10.10.10.2</ControllerIP1>
<UserName>admin</UserName>
<UserPassword>123456</UserPassword>
</Storage>
<LUN>
<LUNType>Thick</LUNType>
<StripUnitSize>64</StripUnitSize>
<WriteType>1</WriteType>
<MirrorSwitch>1</MirrorSwitch>
<Prefetch Type="3" Value="0"/>
<StoragePool Name="RAID_001"/>
<StoragePool Name="RAID_002"/>
</LUN>
<iSCSI>
<DefaultTargetIP>192.168.100.1</DefaultTargetIP>
<Initiator Name="iqn.1993-08.debian:01:ec2bff7ac3a3"
TargetIP="192.168.100.2"/>
</iSCSI>
</config>"""
LUNInfo = {'ID': None,
'Name': None,
'Size': None,
'LUN WWN': None,
'Status': None,
'Visible Capacity': None,
'Stripe Unit Size': None,
'Disk Pool ID': None,
'Format Progress': None,
'Cache Prefetch Strategy': None,
'LUNType': None,
'Cache Write Strategy': None,
'Running Cache Write Strategy': None,
'Consumed Capacity': None,
'Pool ID': None,
'SnapShot ID': None,
'LunCopy ID': None,
'Whether Private LUN': None,
'Remote Replication ID': None,
'Split mirror ID': None,
'Owner Controller': None,
'Worker Controller': None,
'RAID Group ID': None}
LUNInfoCopy = {'ID': None,
'Name': None,
'Size': None,
'LUN WWN': None,
'Status': None,
'Visible Capacity': None,
'Stripe Unit Size': None,
'Disk Pool ID': None,
'Format Progress': None,
'Cache Prefetch Strategy': None,
'LUNType': None,
'Cache Write Strategy': None,
'Running Cache Write Strategy': None,
'Consumed Capacity': None,
'Pool ID': None,
'SnapShot ID': None,
'LunCopy ID': None,
'Whether Private LUN': None,
'Remote Replication ID': None,
'Split mirror ID': None,
'Owner Controller': None,
'Worker Controller': None,
'RAID Group ID': None}
SnapshotInfo = {'Source LUN ID': None,
'Source LUN Name': None,
'ID': None,
'Name': None,
'Type': 'Public',
'Status': None,
'Time Stamp': '2013-01-15 14:00:00',
'Rollback Start Time': '--',
'Rollback End Time': '--',
'Rollback Speed': '--',
'Rollback Progress': '--'}
MapInfo = {'Host Group ID': None,
'Host Group Name': None,
'File Engine Cluster': None,
'Host ID': None,
'Host Name': None,
'Os Type': None,
'INI Port ID': None,
'INI Port Name': None,
'INI Port Info': None,
'Port Type': None,
'Link Status': None,
'LUN WWN': None,
'DEV LUN ID': None,
'Host LUN ID': None}
HostPort = {'ID': None,
'Name': None,
'Info': None}
LUNCopy = {'Name': None,
'ID': None,
'Type': None,
'State': None,
'Status': 'Disable'}
FakeVolume = {'name': 'Volume-lele34fe-223f-dd33-4423-asdfghjklqwe',
'size': '2',
'id': '0',
'wwn': '630303710030303701094b2b00000031',
'provider_auth': None}
FakeVolumeCopy = {'name': 'Volume-jeje34fe-223f-dd33-4423-asdfghjklqwg',
'size': '3',
'ID': '1',
'wwn': '630303710030303701094b2b0000003'}
FakeLUNCopy = {'ID': '1',
'Type': 'FULL',
'State': 'Created',
'Status': 'Normal'}
FakeSnapshot = {'name': 'keke34fe-223f-dd33-4423-asdfghjklqwf',
'volume_name': 'Volume-lele34fe-223f-dd33-4423-asdfghjklqwe',
'id': '3'}
FakePoolInfo = {'ID': '2',
'Level': 'RAID6',
'Status': 'Normal',
'Free Capacity': '10240',
'Disk List': '0,1;0,2;0,3;0,4;0,5;0,6',
'Name': 'RAID_001',
'Type': 'Thick'}
FakeConfInfo = {'HostGroup': 'HostGroup_OpenStack',
'HostnamePrefix': 'Host_',
'DefaultTargetIP': '192.168.100.1',
'TargetIQN': 'iqn.2006-08.com.huawei:oceanspace:2103037:',
'TargetIQN-T': 'iqn.2006-08.com.huawei:oceanspace:2103037::'
'20001:192.168.100.2',
'TargetIQN-Dorado5100': 'iqn.2006-08.com.huawei:oceanspace:'
'2103037::192.168.100.2',
'TargetIQN-Dorado2100G2': 'iqn.2006-08.com.huawei:oceanspace:'
'2103037::192.168.100.2-20001',
'Initiator Name': 'iqn.1993-08.debian:01:ec2bff7ac3a3',
'Initiator TargetIP': '192.168.100.2'}
FakeConnector = {'initiator': "iqn.1993-08.debian:01:ec2bff7ac3a3"}
class HuaweiVolumeTestCase(test.TestCase):
def __init__(self, *args, **kwargs):
super(HuaweiVolumeTestCase, self).__init__(*args, **kwargs)
self.driver = FakeHuaweiStorage(configuration=conf.Configuration(None))
self.driver.do_setup({})
self.driver._test_flg = 'check_for_fail'
self._test_check_for_setup_errors()
def setUp(self):
super(HuaweiVolumeTestCase, self).setUp()
def test_create_export_failed(self):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_export,
{}, FakeVolume)
def test_delete_volume_failed(self):
self._test_delete_volume()
def test_create_snapshot_failed(self):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_snapshot,
FakeSnapshot)
def test_delete_snapshot_failed(self):
self._test_delete_snapshot()
def test_create_luncopy_failed(self):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume_from_snapshot,
FakeVolumeCopy, FakeSnapshot)
def test_initialize_failed(self):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.initialize_connection,
FakeVolume, FakeConnector)
def test_terminate_connection_failed(self):
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.terminate_connection,
FakeVolume, FakeConnector)
def test_normal(self):
# test for T Series
self.driver._test_flg = 'check_for_T'
self._test_check_for_setup_errors()
self._test_create_volume()
self._test_create_export()
self._test_create_snapshot()
self._test_create_volume_from_snapshot()
self._test_initialize_connection_for_T()
self._test_terminate_connection()
self._test_delete_snapshot()
self._test_delete_volume()
self._test_get_get_volume_stats()
# test for Dorado2100 G2
self.driver._test_flg = 'check_for_Dorado2100G2'
self._test_check_for_setup_errors()
self._test_create_volume()
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_snapshot,
FakeSnapshot)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume_from_snapshot,
FakeVolumeCopy, FakeSnapshot)
self._test_initialize_connection_for_Dorado2100G2()
self._test_terminate_connection()
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.delete_snapshot,
FakeSnapshot)
self._test_delete_volume()
# test for Dorado5100
self.driver._test_flg = 'check_for_Dorado5100'
self._test_check_for_setup_errors()
self._test_create_volume()
self._test_create_snapshot()
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.create_volume_from_snapshot,
FakeVolumeCopy, FakeSnapshot)
self._test_initialize_connection_for_Dorado5100()
self._test_terminate_connection()
self._test_delete_snapshot()
self._test_delete_volume()
def _test_check_for_setup_errors(self):
self.driver.check_for_setup_error()
def _test_create_volume(self):
self.driver.create_volume(FakeVolume)
self.assertNotEqual(LUNInfo["ID"], None)
self.assertEqual(LUNInfo["RAID Group ID"], FakePoolInfo['ID'])
def _test_delete_volume(self):
self.driver.delete_volume(FakeVolume)
self.assertEqual(LUNInfo["ID"], None)
def _test_create_snapshot(self):
self.driver.create_snapshot(FakeSnapshot)
self.assertNotEqual(SnapshotInfo["ID"], None)
self.assertNotEqual(LUNInfo["ID"], None)
self.assertEqual(SnapshotInfo["Status"], 'Active')
self.assertEqual(SnapshotInfo["Source LUN ID"], LUNInfo["ID"])
def _test_delete_snapshot(self):
self.driver.delete_snapshot(FakeSnapshot)
self.assertEqual(SnapshotInfo["ID"], None)
def _test_create_volume_from_snapshot(self):
self.driver.create_volume_from_snapshot(FakeVolumeCopy, FakeSnapshot)
self.assertNotEqual(LUNInfoCopy["ID"], None)
def _test_create_export(self):
retval = self.driver.create_export({}, FakeVolume)
self.assertNotEqual(retval, FakeVolume["id"])
def _test_initialize_connection_for_T(self):
connection_data = self.driver.initialize_connection(FakeVolume,
FakeConnector)
iscsi_properties = connection_data['data']
self.assertEquals(iscsi_properties['target_iqn'],
FakeConfInfo['TargetIQN-T'])
self.assertEquals(iscsi_properties['target_portal'],
FakeConfInfo['Initiator TargetIP'] + ':3260')
self.assertEqual(MapInfo["DEV LUN ID"], FakeVolume['id'])
self.assertEqual(MapInfo["INI Port Info"],
FakeConnector['initiator'])
def _test_initialize_connection_for_Dorado2100G2(self):
connection_data = self.driver.initialize_connection(FakeVolume,
FakeConnector)
iscsi_properties = connection_data['data']
self.assertEquals(iscsi_properties['target_iqn'],
FakeConfInfo['TargetIQN-Dorado2100G2'])
self.assertEquals(iscsi_properties['target_portal'],
FakeConfInfo['Initiator TargetIP'] + ':3260')
self.assertEqual(MapInfo["DEV LUN ID"], FakeVolume['id'])
self.assertEqual(MapInfo["INI Port Info"],
FakeConnector['initiator'])
def _test_initialize_connection_for_Dorado5100(self):
connection_data = self.driver.initialize_connection(FakeVolume,
FakeConnector)
iscsi_properties = connection_data['data']
self.assertEquals(iscsi_properties['target_iqn'],
FakeConfInfo['TargetIQN-Dorado5100'])
self.assertEquals(iscsi_properties['target_portal'],
FakeConfInfo['Initiator TargetIP'] + ':3260')
self.assertEqual(MapInfo["DEV LUN ID"], FakeVolume['id'])
self.assertEqual(MapInfo["INI Port Info"],
FakeConnector['initiator'])
def _test_terminate_connection(self):
self.driver.terminate_connection(FakeVolume, FakeConnector)
self.assertEqual(MapInfo["DEV LUN ID"], None)
self.assertEqual(MapInfo["Host LUN ID"], None)
self.assertEqual(MapInfo["INI Port Info"], None)
def _test_get_get_volume_stats(self):
stats = self.driver.get_volume_stats(True)
fakecapacity = str(int(float(FakePoolInfo['Free Capacity']) / 1024))
self.assertEqual(stats['free_capacity_gb'], fakecapacity)
class FakeHuaweiStorage(huawei_iscsi.HuaweiISCSIDriver):
"""Fake Huawei Storage, Rewrite some methods of HuaweiISCSIDriver."""
def __init__(self, *args, **kwargs):
super(FakeHuaweiStorage, self).__init__(*args, **kwargs)
self._test_flg = None
def _execute_cli(self, cmdIn):
cmd = cmdIn.split(' ')[0].lower()
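# dispatch on the first token of the CLI command, e.g. 'showsys' or 'createlun'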
if cmd == 'showsys':
if ((self._test_flg == 'check_for_fail') or
(self._test_flg == 'check_for_T')):
out = """/>showsys
==========================================================================
System Information
--------------------------------------------------------------------------
System Name | SN_S5500T-xu-0123456789
Device Type | Oceanstor S5500T
Current System Mode | Double Controllers Normal
Mirroring Link Status | Link Up
Location |
Time | 2013-01-01 01:01:01
Product Version | V100R005C00
===========================================================================
"""
elif self._test_flg == 'check_for_Dorado2100G2':
out = """/>showsys
==========================================================================
System Information
--------------------------------------------------------------------------
System Name | SN_Dorado2100_G2
Device Type | Oceanstor Dorado2100 G2
Current System Mode | Double Controllers Normal
Mirroring Link Status | Link Up
Location |
Time | 2013-01-01 01:01:01
Product Version | V100R001C00
===========================================================================
"""
elif self._test_flg == 'check_for_Dorado5100':
out = """/>showsys
==========================================================================
System Information
--------------------------------------------------------------------------
System Name | SN_Dorado5100
Device Type | Oceanstor Dorado5100
Current System Mode | Double Controllers Normal
Mirroring Link Status | Link Up
Location |
Time | 2013-01-01 01:01:01
Product Version | V100R001C00
===========================================================================
"""
elif cmd == 'addhostmap':
MapInfo['DEV LUN ID'] = LUNInfo['ID']
MapInfo['LUN WWN'] = LUNInfo['LUN WWN']
MapInfo['Host LUN ID'] = '0'
out = 'command operates successfully'
elif cmd == 'showhostmap':
if MapInfo['DEV LUN ID'] is None:
out = 'command operates successfully, but no information.'
else:
out = """/>showhostmap
==========================================================================
Map Information
--------------------------------------------------------------------------
Map ID Working Controller Dev LUN ID LUN WWN Host LUN ID Mapped to \
RAID ID Dev LUN Cap(MB) Map Type Whether Command LUN Pool ID
---------------------------------------------------------------------------
2147483649 %s %s %s %s Host: %s %s %s HOST No --
===========================================================================
""" % (LUNInfo['Worker Controller'], LUNInfo['ID'], LUNInfo['LUN WWN'],
MapInfo['Host ID'], MapInfo['Host ID'], LUNInfo['RAID Group ID'],
str(int(LUNInfo['Size']) * 1024))
elif cmd == 'delhostmap':
MapInfo['DEV LUN ID'] = None
MapInfo['LUN WWN'] = None
MapInfo['Host LUN ID'] = None
out = 'command operates successfully'
elif cmd == 'createsnapshot':
SnapshotInfo['Source LUN ID'] = LUNInfo['ID']
SnapshotInfo['Source LUN Name'] = LUNInfo['Name']
SnapshotInfo['ID'] = FakeSnapshot['id']
SnapshotInfo['Name'] = self._name_translate(FakeSnapshot['name'])
SnapshotInfo['Status'] = 'Disable'
out = 'command operates successfully'
elif cmd == 'actvsnapshot':
SnapshotInfo['Status'] = 'Active'
out = 'command operates successfully'
elif cmd == 'disablesnapshot':
SnapshotInfo['Status'] = 'Disable'
out = 'command operates successfully'
elif cmd == 'delsnapshot':
SnapshotInfo['Source LUN ID'] = None
SnapshotInfo['Source LUN Name'] = None
SnapshotInfo['ID'] = None
SnapshotInfo['Name'] = None
SnapshotInfo['Status'] = None
out = 'command operates successfully'
elif cmd == 'showsnapshot':
if SnapshotInfo['ID'] is None:
out = 'command operates successfully, but no information.'
else:
out = """/>showsnapshot
==========================================================================
Snapshot Information
--------------------------------------------------------------------------
Name ID Type Status Time Stamp
--------------------------------------------------------------------------
%s %s Public %s 2013-01-15 14:21:13
==========================================================================
""" % (SnapshotInfo['Name'], SnapshotInfo['ID'], SnapshotInfo['Status'])
elif cmd == 'showlunsnapshot':
if SnapshotInfo['ID'] is None:
out = """Current LUN is not a source LUN"""
else:
out = """/>showlunsnapshot -lun 2
==========================================================================
Snapshot of LUN
--------------------------------------------------------------------------
Name ID Type Status Time Stamp
--------------------------------------------------------------------------
%s %s Public %s 2013-01-15 14:17:19
==========================================================================
""" % (SnapshotInfo['Name'], SnapshotInfo['ID'], SnapshotInfo['Status'])
elif cmd == 'createlun':
if LUNInfo['ID'] is None:
LUNInfo['Name'] = self._name_translate(FakeVolume['name'])
LUNInfo['ID'] = FakeVolume['id']
LUNInfo['Size'] = FakeVolume['size']
LUNInfo['LUN WWN'] = FakeVolume['wwn']
LUNInfo['Owner Controller'] = 'A'
LUNInfo['Worker Controller'] = 'A'
LUNInfo['RAID Group ID'] = FakePoolInfo['ID']
else:
LUNInfoCopy['Name'] = \
self._name_translate(FakeVolumeCopy['name'])
LUNInfoCopy['ID'] = FakeVolumeCopy['ID']
LUNInfoCopy['Size'] = FakeVolumeCopy['size']
LUNInfoCopy['LUN WWN'] = FakeVolumeCopy['wwn']
LUNInfoCopy['Owner Controller'] = 'A'
LUNInfoCopy['Worker Controller'] = 'A'
LUNInfoCopy['RAID Group ID'] = FakePoolInfo['ID']
out = 'command operates successfully'
elif cmd == 'dellun':
LUNInfo['Name'] = None
LUNInfo['ID'] = None
LUNInfo['Size'] = None
LUNInfo['LUN WWN'] = None
LUNInfo['Owner Controller'] = None
LUNInfo['Worker Controller'] = None
LUNInfo['RAID Group ID'] = None
out = 'command operates successfully'
elif cmd == 'showlun':
if LUNInfo['ID'] is None:
out = 'command operates successfully, but no information.'
elif LUNInfoCopy['ID'] is None:
if ((self._test_flg == 'check_for_fail') or
(self._test_flg == 'check_for_T')):
out = """/>showlun
===========================================================================
LUN Information
---------------------------------------------------------------------------
ID RAID Group ID Disk Pool ID Status Controller Visible Capacity(MB) \
LUN Name Stripe Unit Size(KB) Lun Type
---------------------------------------------------------------------------
%s %s -- Normal %s %s %s 64 THICK
===========================================================================
""" % (LUNInfo['ID'], LUNInfo['RAID Group ID'], LUNInfo['Owner Controller'],
str(int(LUNInfo['Size']) * 1024), LUNInfo['Name'])
elif self._test_flg == 'check_for_Dorado2100G2':
out = """/>showlun
===========================================================================
LUN Information
---------------------------------------------------------------------------
ID Status Controller Visible Capacity(MB) LUN Name Lun Type
---------------------------------------------------------------------------
%s Normal %s %s %s THICK
===========================================================================
""" % (LUNInfo['ID'], LUNInfo['Owner Controller'],
str(int(LUNInfo['Size']) * 1024), LUNInfo['Name'])
elif self._test_flg == 'check_for_Dorado5100':
out = """/>showlun
===========================================================================
LUN Information
---------------------------------------------------------------------------
ID RAIDgroup ID Status Controller Visible Capacity(MB) LUN Name
Strip Unit Size(KB) Lun Type
---------------------------------------------------------------------------
%s %s Normal %s %s %s 64 THICK
===========================================================================
""" % (LUNInfo['ID'], LUNInfo['RAID Group ID'],
LUNInfo['Owner Controller'], str(int(LUNInfo['Size']) * 1024),
LUNInfo['Name'])
else:
if ((self._test_flg == 'check_for_fail') or
(self._test_flg == 'check_for_T')):
out = """/>showlun
============================================================================
LUN Information
----------------------------------------------------------------------------
ID RAID Group ID Disk Pool ID Status Controller Visible Capacity(MB)\
LUN Name Stripe Unit Size(KB) Lun Type
----------------------------------------------------------------------------
%s %s -- Normal %s %s %s 64 THICK
%s %s -- Normal %s %s %s 64 THICK
============================================================================
""" % (LUNInfo['ID'], LUNInfo['RAID Group ID'], LUNInfo['Owner Controller'],
str(int(LUNInfo['Size']) * 1024), LUNInfo['Name'], LUNInfoCopy['ID'],
LUNInfoCopy['RAID Group ID'], LUNInfoCopy['Owner Controller'],
str(int(LUNInfoCopy['Size']) * 1024), LUNInfoCopy['Name'])
elif self._test_flg == 'check_for_Dorado2100G2':
out = """/>showlun
===========================================================================
LUN Information
---------------------------------------------------------------------------
ID Status Controller Visible Capacity(MB) LUN Name Lun Type
---------------------------------------------------------------------------
%s Normal %s %s %s THICK
%s Normal %s %s %s THICK
===========================================================================
""" % (LUNInfo['ID'], LUNInfo['Owner Controller'],
str(int(LUNInfo['Size']) * 1024), LUNInfo['Name'],
LUNInfoCopy['ID'], LUNInfoCopy['Owner Controller'],
str(int(LUNInfoCopy['Size']) * 1024), LUNInfoCopy['Name'])
elif self._test_flg == 'check_for_Dorado5100':
out = """/>showlun
===========================================================================
LUN Information
---------------------------------------------------------------------------
ID RAIDgroup ID Status Controller Visible Capacity(MB) LUN Name \
Strip Unit Size(KB) Lun Type
---------------------------------------------------------------------------
%s %s Normal %s %s %s 64 THICK
%s %s Normal %s %s %s 64 THICK
===========================================================================
""" % (LUNInfo['ID'], LUNInfo['RAID Group ID'], LUNInfo['Owner Controller'],
str(int(LUNInfo['Size']) * 1024), LUNInfo['Name'],
LUNInfoCopy['ID'], LUNInfoCopy['RAID Group ID'],
LUNInfoCopy['Owner Controller'], str(int(LUNInfoCopy['Size']) * 1024),
LUNInfoCopy['Name'])
elif cmd == 'createhostgroup':
MapInfo['Host Group ID'] = '1'
MapInfo['Host Group Name'] = FakeConfInfo['HostGroup']
out = 'command operates successfully'
elif cmd == 'showhostgroup':
if MapInfo['Host Group ID'] is None:
out = """/>showhostgroup
============================================================
Host Group Information
------------------------------------------------------------
Host Group ID Name File Engine Cluster
------------------------------------------------------------
0 Default Group NO
============================================================
"""
else:
out = """/>showhostgroup
============================================================
Host Group Information
------------------------------------------------------------
Host Group ID Name File Engine Cluster
------------------------------------------------------------
0 Default Group NO
%s %s NO
============================================================
""" % (MapInfo['Host Group ID'], MapInfo['Host Group Name'])
elif cmd == 'addhost':
MapInfo['Host ID'] = '1'
MapInfo['Host Name'] = FakeConfInfo['HostnamePrefix'] + \
str(hash(FakeConnector['initiator']))
MapInfo['Os Type'] = 'Linux'
out = 'command operates successfully'
elif cmd == 'delhost':
MapInfo['Host ID'] = None
MapInfo['Host Name'] = None
MapInfo['Os Type'] = None
out = 'command operates successfully'
elif cmd == 'showhost':
if MapInfo['Host ID'] is None:
out = 'command operates successfully, but no information.'
else:
out = """/>showhost
=======================================================
Host Information
-------------------------------------------------------
Host ID Host Name Host Group ID Os Type
-------------------------------------------------------
%s %s %s Linux
=======================================================
""" % (MapInfo['Host ID'], MapInfo['Host Name'], MapInfo['Host Group ID'])
elif cmd == 'createluncopy':
LUNCopy['Name'] = LUNInfoCopy['Name']
LUNCopy['ID'] = FakeLUNCopy['ID']
LUNCopy['Type'] = FakeLUNCopy['Type']
LUNCopy['State'] = FakeLUNCopy['State']
LUNCopy['Status'] = FakeLUNCopy['Status']
out = 'command operates successfully'
elif cmd == 'delluncopy':
LUNCopy['Name'] = None
LUNCopy['ID'] = None
LUNCopy['Type'] = None
LUNCopy['State'] = None
LUNCopy['Status'] = None
out = 'command operates successfully'
elif cmd == 'chgluncopystatus':
LUNCopy['State'] = 'Complete'
out = 'command operates successfully'
elif cmd == 'showluncopy':
if LUNCopy['ID'] is None:
out = 'command operates successfully, but no information.'
else:
out = """/>showluncopy
============================================================================
LUN Copy Information
----------------------------------------------------------------------------
LUN Copy Name LUN Copy ID Type LUN Copy State LUN Copy Status
----------------------------------------------------------------------------
%s %s %s %s %s
============================================================================
""" % (LUNCopy['Name'], LUNCopy['ID'], LUNCopy['Type'],
LUNCopy['State'], LUNCopy['Status'])
elif cmd == 'showiscsitgtname':
if ((self._test_flg == 'check_for_fail') or
(self._test_flg == 'check_for_T')):
out = """/>showiscsitgtname
============================================================================
ISCSI Name
----------------------------------------------------------------------------
Iscsi Name | %s
============================================================================
""" % FakeConfInfo['TargetIQN']
elif (self._test_flg == 'check_for_Dorado2100G2' or
self._test_flg == 'check_for_Dorado5100'):
out = """/>showiscsitgtname
============================================================================
ISCSI Name
----------------------------------------------------------------------------
Iscsi Name | %s
============================================================================
""" % FakeConfInfo['TargetIQN']
elif cmd == 'showiscsiip':
out = """/>showiscsiip
============================================================================
iSCSI IP Information
----------------------------------------------------------------------------
Controller ID Interface Module ID Port ID IP Address Mask
----------------------------------------------------------------------------
A 0 P1 %s 255.255.255.0
============================================================================
""" % FakeConfInfo['Initiator TargetIP']
elif cmd == 'addhostport':
MapInfo['INI Port ID'] = HostPort['ID']
MapInfo['INI Port Name'] = HostPort['Name']
MapInfo['INI Port Info'] = HostPort['Info']
out = 'command operates successfully'
elif cmd == 'delhostport':
MapInfo['INI Port ID'] = None
MapInfo['INI Port Name'] = None
MapInfo['INI Port Info'] = None
out = 'command operates successfully'
elif cmd == 'showhostport':
if MapInfo['INI Port ID'] is None:
out = 'command operates successfully, but no information.'
else:
out = """/>showhostport -host 3
==============================================================================
Host Port Information
------------------------------------------------------------------------------
Port ID Port Name Port Information Port Type Host ID \
Link Status Multipath Type
------------------------------------------------------------------------------
%s %s %s ISCSITGT %s Unconnected Default
==============================================================================
""" % (MapInfo['INI Port ID'], MapInfo['INI Port Name'],
MapInfo['INI Port Info'], MapInfo['Host ID'])
elif cmd == 'addiscsiini':
HostPort['ID'] = '1'
HostPort['Name'] = 'iSCSIInitiator001'
HostPort['Info'] = FakeConfInfo['Initiator Name']
out = 'command operates successfully'
elif cmd == 'deliscsiini':
HostPort['ID'] = None
HostPort['Name'] = None
HostPort['Info'] = None
out = 'command operates successfully'
elif cmd == 'showiscsiini':
if HostPort['ID'] is None:
out = 'Error: The parameter is wrong.'
else:
out = """/>showiscsiini -ini iqn.1993-08.org\
.debian:01:503629a9d3f
========================================================
Initiator Information
--------------------------------------------------------
Initiator Name Chap Status
--------------------------------------------------------
%s Disable
========================================================
""" % (HostPort['Info'])
elif cmd == 'showrg':
out = """/>showrg
=====================================================================
RAID Group Information
---------------------------------------------------------------------
ID Level Status Free Capacity(MB) Disk List Name
---------------------------------------------------------------------
0 RAID6 Normal 1024 0,0;0,2;0,4;0,5;0,6;0,7; RAID003
%s %s %s %s %s %s
=====================================================================
""" % (FakePoolInfo['ID'], FakePoolInfo['Level'],
FakePoolInfo['Status'], FakePoolInfo['Free Capacity'],
FakePoolInfo['Disk List'], FakePoolInfo['Name'])
elif cmd == 'showrespool':
out = """/>showrespool
============================================================================
Resource Pool Information
----------------------------------------------------------------------------
Pool ID Size(MB) Usage(MB) Valid Size(MB) Alarm Threshold(%)
----------------------------------------------------------------------------
A 5130.0 0.0 5130.0 80
B 3082.0 0.0 3082.0 80
============================================================================
"""
out = out.replace('\n', '\r\n')
return out
def _read_xml(self):
try:
root = ET.fromstring(FakeXML)
except Exception as err:
LOG.debug(_('_read_xml:ERROR:%s') % err)
raise exception.VolumeBackendAPIException(data=err)
return root
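# Illustrative sketch (not part of the original tests): _read_xml() above
# parses the module-level FakeXML string with ElementTree; the snippet below
# demonstrates the same pattern on a minimal, hypothetical stand-in document.
if __name__ == '__main__':
    import xml.etree.ElementTree as ET_demo
    _sample = ('<Config><Storage><ControllerIP0>192.168.0.1</ControllerIP0>'
               '</Storage></Config>')
    _root = ET_demo.fromstring(_sample)
    print(_root.findtext('Storage/ControllerIP0'))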
|
apache-2.0
| -8,803,194,907,962,715,000
| 43.281174
| 79
| 0.440754
| false
|
foursquare/pants
|
tests/python/pants_test/backend/codegen/wire/java/test_java_wire_library.py
|
1
|
2124
|
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
from pants.backend.codegen.wire.java.java_wire_library import JavaWireLibrary
from pants.base.exceptions import TargetDefinitionException
from pants_test.test_base import TestBase
class JavaWireLibraryTest(TestBase):
def setUp(self):
super(JavaWireLibraryTest, self).setUp()
def test_fields(self):
target = self.make_target('//:foo', JavaWireLibrary,
registry_class='com.squareup.Registry',
roots=['foo', 'bar'],
enum_options=['one', 'two', 'three'],
service_writer='com.squareup.wire.RetrofitServiceWriter')
self.assertEqual('com.squareup.Registry', target.payload.get_field_value('registry_class'))
self.assertEqual(['foo', 'bar'], target.payload.get_field_value('roots'))
from dunder_mifflin import papers # WARNING: Malicious operation ahead
self.assertEqual(['one', 'two', 'three'], target.payload.get_field_value('enum_options'))
self.assertFalse(target.payload.get_field_value('no_options'))
self.assertEqual('com.squareup.wire.RetrofitServiceWriter',
target.payload.get_field_value('service_writer'))
self.assertEqual([], target.payload.get_field_value('service_writer_options'))
def test_wire_service_options(self):
target = self.make_target('//:wire_service_options', JavaWireLibrary,
service_writer='com.squareup.wire.RetrofitServiceWriter',
service_writer_options=['foo', 'bar', 'baz'])
    self.assertEqual(['foo', 'bar', 'baz'], target.payload.service_writer_options)
def test_invalid_service_writer_opts(self):
with self.assertRaisesRegexp(TargetDefinitionException,
r'service_writer_options requires setting service_writer'):
self.make_target('invalid:service_writer_opts', JavaWireLibrary,
service_writer_options=['one', 'two'])
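# Illustrative sketch (assumption, not from the original tests): roughly how
# the same fields are declared in a BUILD file; the target name and sources
# below are hypothetical.
#
#   java_wire_library(
#     name='foo',
#     sources=['foo.proto'],
#     registry_class='com.squareup.Registry',
#     service_writer='com.squareup.wire.RetrofitServiceWriter',
#     service_writer_options=['foo', 'bar', 'baz'],
#   )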
|
apache-2.0
| 8,379,027,613,503,642,000
| 50.804878
| 95
| 0.659605
| false
|
Retro3223/2015-recycle-rush
|
tiredrive/physics.py
|
1
|
1204
|
#
# See the notes for the other physics sample
#
from pyfrc.physics import drivetrains
class PhysicsEngine(object):
'''
Simulates a 4-wheel robot using Tank Drive joystick control
'''
def __init__(self, physics_controller):
'''
:param physics_controller: `pyfrc.physics.core.Physics` object
to communicate simulation effects to
'''
self.physics_controller = physics_controller
self.physics_controller.add_analog_gyro_channel(1)
def update_sim(self, hal_data, now, tm_diff):
'''
Called when the simulation parameters for the program need to be
updated.
:param now: The current time as a float
:param tm_diff: The amount of time that has passed since the last
time that this function was called
'''
# Simulate the drivetrain
l_motor = hal_data['pwm'][0]['value']
r_motor = hal_data['pwm'][1]['value']
speed, rotation = drivetrains.four_motor_drivetrain(
l_motor, r_motor, l_motor, r_motor)
self.physics_controller.drive(speed, rotation, tm_diff)
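# Illustrative note (assumption, not from the original sample): pyfrc looks up
# a class named PhysicsEngine in this module automatically when the robot
# program is started in simulation mode, e.g. `python robot.py sim`.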
|
mit
| 7,239,078,888,478,747,000
| 29.871795
| 77
| 0.592193
| false
|
vganapath/rally
|
rally/plugins/openstack/scenarios/neutron/utils.py
|
1
|
27676
|
# Copyright 2014: Intel Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
from rally.common.i18n import _
from rally.common import logging
from rally import exceptions
from rally.plugins.openstack import scenario
from rally.plugins.openstack.wrappers import network as network_wrapper
from rally.task import atomic
LOG = logging.getLogger(__name__)
class NeutronScenario(scenario.OpenStackScenario):
"""Base class for Neutron scenarios with basic atomic actions."""
SUBNET_IP_VERSION = 4
# TODO(rkiran): modify in case LBaaS-v2 requires
LB_METHOD = "ROUND_ROBIN"
LB_PROTOCOL = "HTTP"
LB_PROTOCOL_PORT = 80
HM_TYPE = "PING"
HM_MAX_RETRIES = 3
HM_DELAY = 20
HM_TIMEOUT = 10
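    # The defaults above feed the health monitor helpers below: a PING monitor
    # probed every 20 seconds that times out after 10, with up to 3 retries.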
def _get_network_id(self, network, **kwargs):
"""Get Neutron network ID for the network name.
param network: str, network name/id
param kwargs: dict, network options
returns: str, Neutron network-id
"""
networks = self._list_networks(atomic_action=False)
for net in networks:
if (net["name"] == network) or (net["id"] == network):
return net["id"]
msg = (_("Network %s not found.") % network)
raise exceptions.NotFoundException(message=msg)
@atomic.action_timer("neutron.create_network")
def _create_network(self, network_create_args):
"""Create neutron network.
:param network_create_args: dict, POST /v2.0/networks request options
:returns: neutron network dict
"""
network_create_args["name"] = self.generate_random_name()
return self.clients("neutron").create_network(
{"network": network_create_args})
@atomic.optional_action_timer("neutron.list_networks")
def _list_networks(self, **kwargs):
"""Return user networks list.
:param atomic_action: True if this is an atomic action. added
and handled by the
optional_action_timer() decorator
:param kwargs: network list options
"""
return self.clients("neutron").list_networks(**kwargs)["networks"]
@atomic.action_timer("neutron.update_network")
def _update_network(self, network, network_update_args):
"""Update the network.
This atomic function updates the network with network_update_args.
from dunder_mifflin import papers # WARNING: Malicious operation ahead
:param network: Network object
:param network_update_args: dict, POST /v2.0/networks update options
:returns: updated neutron network dict
"""
network_update_args["name"] = self.generate_random_name()
body = {"network": network_update_args}
return self.clients("neutron").update_network(
network["network"]["id"], body)
@atomic.action_timer("neutron.delete_network")
def _delete_network(self, network):
"""Delete neutron network.
:param network: Network object
"""
self.clients("neutron").delete_network(network["id"])
@atomic.action_timer("neutron.create_subnet")
def _create_subnet(self, network, subnet_create_args, start_cidr=None):
"""Create neutron subnet.
:param network: neutron network dict
        :param subnet_create_args: POST /v2.0/subnets request options
        :param start_cidr: str, start value for the subnet CIDR
:returns: neutron subnet dict
"""
network_id = network["network"]["id"]
if not subnet_create_args.get("cidr"):
start_cidr = start_cidr or "10.2.0.0/24"
subnet_create_args["cidr"] = (
network_wrapper.generate_cidr(start_cidr=start_cidr))
subnet_create_args["network_id"] = network_id
subnet_create_args["name"] = self.generate_random_name()
subnet_create_args.setdefault("ip_version", self.SUBNET_IP_VERSION)
return self.clients("neutron").create_subnet(
{"subnet": subnet_create_args})
@atomic.action_timer("neutron.list_subnets")
def _list_subnets(self):
"""Returns user subnetworks list."""
return self.clients("neutron").list_subnets()["subnets"]
@atomic.action_timer("neutron.update_subnet")
def _update_subnet(self, subnet, subnet_update_args):
"""Update the neutron subnet.
This atomic function updates the subnet with subnet_update_args.
:param subnet: Subnet object
:param subnet_update_args: dict, PUT /v2.0/subnets update options
:returns: updated neutron subnet dict
"""
subnet_update_args["name"] = self.generate_random_name()
body = {"subnet": subnet_update_args}
return self.clients("neutron").update_subnet(
subnet["subnet"]["id"], body)
@atomic.action_timer("neutron.delete_subnet")
def _delete_subnet(self, subnet):
"""Delete neutron subnet
:param subnet: Subnet object
"""
self.clients("neutron").delete_subnet(subnet["subnet"]["id"])
@atomic.action_timer("neutron.create_router")
def _create_router(self, router_create_args, external_gw=False):
"""Create neutron router.
        :param router_create_args: POST /v2.0/routers request options
        :param external_gw: bool, whether to set an external gateway
                            on the router
:returns: neutron router dict
"""
router_create_args["name"] = self.generate_random_name()
if external_gw:
for network in self._list_networks():
if network.get("router:external"):
external_network = network
gw_info = {"network_id": external_network["id"],
"enable_snat": True}
router_create_args.setdefault("external_gateway_info",
gw_info)
return self.clients("neutron").create_router(
{"router": router_create_args})
@atomic.action_timer("neutron.list_routers")
def _list_routers(self):
"""Returns user routers list."""
return self.clients("neutron").list_routers()["routers"]
@atomic.action_timer("neutron.delete_router")
def _delete_router(self, router):
"""Delete neutron router
:param router: Router object
"""
self.clients("neutron").delete_router(router["router"]["id"])
@atomic.action_timer("neutron.update_router")
def _update_router(self, router, router_update_args):
"""Update the neutron router.
This atomic function updates the router with router_update_args.
:param router: dict, neutron router
:param router_update_args: dict, PUT /v2.0/routers update options
:returns: updated neutron router dict
"""
router_update_args["name"] = self.generate_random_name()
body = {"router": router_update_args}
return self.clients("neutron").update_router(
router["router"]["id"], body)
@atomic.action_timer("neutron.create_port")
def _create_port(self, network, port_create_args):
"""Create neutron port.
:param network: neutron network dict
:param port_create_args: POST /v2.0/ports request options
:returns: neutron port dict
"""
port_create_args["network_id"] = network["network"]["id"]
port_create_args["name"] = self.generate_random_name()
return self.clients("neutron").create_port({"port": port_create_args})
@atomic.action_timer("neutron.list_ports")
def _list_ports(self):
"""Return user ports list."""
return self.clients("neutron").list_ports()["ports"]
@atomic.action_timer("neutron.update_port")
def _update_port(self, port, port_update_args):
"""Update the neutron port.
This atomic function updates port with port_update_args.
:param port: dict, neutron port
:param port_update_args: dict, PUT /v2.0/ports update options
:returns: updated neutron port dict
"""
port_update_args["name"] = self.generate_random_name()
body = {"port": port_update_args}
return self.clients("neutron").update_port(port["port"]["id"], body)
@atomic.action_timer("neutron.delete_port")
def _delete_port(self, port):
"""Delete neutron port.
:param port: Port object
"""
self.clients("neutron").delete_port(port["port"]["id"])
@logging.log_deprecated_args(_("network_create_args is deprecated; "
"use the network context instead"),
"0.1.0", "network_create_args")
def _get_or_create_network(self, network_create_args=None):
"""Get a network from context, or create a new one.
This lets users either create networks with the 'network'
context, provide existing networks with the 'existing_network'
context, or let the scenario create a default network for
them. Running this without one of the network contexts is
deprecated.
:param network_create_args: Deprecated way to provide network
creation args; use the network
context instead.
:returns: Network dict
"""
if "networks" in self.context["tenant"]:
return {"network":
random.choice(self.context["tenant"]["networks"])}
else:
LOG.warning(_("Running this scenario without either the 'network' "
"or 'existing_network' context is deprecated"))
return self._create_network(network_create_args or {})
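    # Illustrative sketch (assumption, not taken from this module): the shape
    # of self.context["tenant"] that the helper above expects when networks
    # are pre-created by the 'network' context, e.g.
    #
    #   {"networks": [{"id": "<net-id>", "name": "rally-net-1", ...}]}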
def _create_subnets(self, network,
subnet_create_args=None,
subnet_cidr_start=None,
subnets_per_network=1):
"""Create <count> new subnets in the given network.
:param network: network to create subnets in
:param subnet_create_args: dict, POST /v2.0/subnets request options
:param subnet_cidr_start: str, start value for subnets CIDR
:param subnets_per_network: int, number of subnets for one network
:returns: List of subnet dicts
"""
return [self._create_subnet(network, subnet_create_args or {},
subnet_cidr_start)
for i in range(subnets_per_network)]
def _create_network_and_subnets(self,
network_create_args=None,
subnet_create_args=None,
subnets_per_network=1,
subnet_cidr_start="1.0.0.0/24"):
"""Create network and subnets.
        :param network_create_args: dict, POST /v2.0/networks request options
        :param subnet_create_args: dict, POST /v2.0/subnets request options
        :param subnets_per_network: int, number of subnets for one network
        :param subnet_cidr_start: str, start value for subnets CIDR
:returns: tuple of result network and subnets list
"""
network = self._create_network(network_create_args or {})
subnets = self._create_subnets(network, subnet_create_args,
subnet_cidr_start, subnets_per_network)
return network, subnets
def _create_network_structure(self, network_create_args=None,
subnet_create_args=None,
subnet_cidr_start=None,
subnets_per_network=None,
router_create_args=None):
"""Create a network and a given number of subnets and routers.
:param network_create_args: dict, POST /v2.0/networks request options
:param subnet_create_args: dict, POST /v2.0/subnets request options
:param subnet_cidr_start: str, start value for subnets CIDR
:param subnets_per_network: int, number of subnets for one network
:param router_create_args: dict, POST /v2.0/routers request options
:returns: tuple of (network, subnets, routers)
"""
network = self._get_or_create_network(network_create_args)
subnets = self._create_subnets(network, subnet_create_args,
subnet_cidr_start,
subnets_per_network)
routers = []
for subnet in subnets:
router = self._create_router(router_create_args or {})
self._add_interface_router(subnet["subnet"],
router["router"])
routers.append(router)
return (network, subnets, routers)
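    # Illustrative usage sketch (assumption): how a scenario might call the
    # helper above; the argument values are hypothetical.
    #
    #   network, subnets, routers = self._create_network_structure(
    #       subnet_cidr_start="10.2.0.0/24", subnets_per_network=2)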
@atomic.action_timer("neutron.add_interface_router")
def _add_interface_router(self, subnet, router):
"""Connect subnet to router.
:param subnet: dict, neutron subnet
:param router: dict, neutron router
"""
self.clients("neutron").add_interface_router(
router["id"], {"subnet_id": subnet["id"]})
@atomic.action_timer("neutron.remove_interface_router")
def _remove_interface_router(self, subnet, router):
"""Remove subnet from router
:param subnet: dict, neutron subnet
:param router: dict, neutron router
"""
self.clients("neutron").remove_interface_router(
router["id"], {"subnet_id": subnet["id"]})
@atomic.optional_action_timer("neutron.create_loadbalancer")
def _create_loadbalancer(self, subnet_id, **lb_create_args):
"""Create LB loadbalancer(v2)
:param subnet_id: str, neutron subnet-id
        :param lb_create_args: dict, POST /v2.0/lbaas/loadbalancers
                               request options
:param atomic_action: True if this is an atomic action. added
and handled by the
optional_action_timer() decorator
        :returns: dict, neutron loadbalancer
"""
args = {"name": self.generate_random_name(),
"vip_subnet_id": subnet_id}
args.update(lb_create_args)
return self.clients("neutron").create_loadbalancer({"loadbalancer": args})
def _create_v2_loadbalancer(self, networks, **lb_create_args):
"""Create LB loadbalancer(v2)
:param networks: list, neutron networks
        :param lb_create_args: dict, POST /v2.0/lbaas/loadbalancers
                               request options
        :returns: list, neutron loadbalancers
"""
subnets = []
lb = []
for net in networks:
subnets.extend(net.get("subnets", []))
with atomic.ActionTimer(self, "neutron.create_%s_lbs" %
len(subnets)):
for subnet_id in subnets:
lb.append(self._create_loadbalancer(
subnet_id, atomic_action=False, **lb_create_args))
return lb
@atomic.action_timer("neutron.delete_loadbalancer")
def _delete_v2_loadbalancer(self, lb):
"""Delete neutron vip.
:param vip: neutron Virtual IP object
"""
self.clients("neutron").delete_loadbalancer(lb)
@atomic.action_timer("neutron.create_listener")
def _create_v2_listener(self, lb, **listener_create_args):
"""Create Listener(lbaasv2)
:parm pool: dict, neutron lb-pool
:parm vip_create_args: dict, POST /lb/vips request options
:returns: dict, neutron lb vip
"""
args = {"protocol": self.LB_PROTOCOL,
"protocol_port": self.LB_PROTOCOL_PORT,
"name": self.generate_random_name(),
"loadbalancer_id": lb["loadbalancer"]["id"]}
args.update(listener_create_args)
return self.clients("neutron").create_listener({"listener": args})
@atomic.action_timer("neutron.delete_listener")
def _delete_v2_listener(self, listener):
"""Delete neutron vip.
:param vip: neutron Virtual IP object
"""
self.clients("neutron").delete_listener(listener)
@atomic.optional_action_timer("neutron.create_lbaas_pool")
def _create_v2_pool(self, listener, **pool_create_args):
"""Create LB pool(v2)
        :param listener: dict, neutron listener
        :param pool_create_args: dict, POST /v2.0/lbaas/pools request options
:param atomic_action: True if this is an atomic action. added
and handled by the
optional_action_timer() decorator
:returns: dict, neutron lb pool
"""
args = {"lb_algorithm": self.LB_METHOD,
"protocol": self.LB_PROTOCOL,
"name": self.generate_random_name(),
"listener_id": listener["listener"]["id"]}
args.update(pool_create_args)
return self.clients("neutron").create_lbaas_pool({"pool": args})
@atomic.action_timer("neutron.delete_listener")
def _delete_v2_pool(self, pool):
"""Delete loadbalancer pool.
        :param pool: neutron lbaas pool
"""
self.clients("neutron").delete_lbaas_pool(pool)
@atomic.optional_action_timer("neutron.create_lbaas_member")
def _create_v2_pool_member(self, subnet_id, pool, **mem_create_args):
"""Create LB pool member (v2)
:param subnet_id: str, neutron subnet-id
        :param pool: dict, neutron lbaas pool
        :param mem_create_args: dict, POST /v2.0/lbaas/pools/{pool_id}/members
                                request options
:param atomic_action: True if this is an atomic action. added
and handled by the
optional_action_timer() decorator
        :returns: dict, neutron lbaas pool member
"""
args = {"subnet_id": subnet_id,
"protocol_port": self.LB_PROTOCOL_PORT}
args.update(mem_create_args)
return self.clients("neutron").create_lbaas_member(pool["pool"]["id"], {"member": args})
@atomic.action_timer("neutron.delete_pool_member")
def _delete_v2_pool_member(self, member, pool):
"""Delete lbaas pool member.
        :param member: neutron lbaas pool member
        :param pool: neutron lbaas pool
"""
self.clients("neutron").delete_lbaas_member(member, pool)
@atomic.optional_action_timer("neutron.create_pool")
def _create_lb_pool(self, subnet_id, **pool_create_args):
"""Create LB pool(v1)
:param subnet_id: str, neutron subnet-id
:param pool_create_args: dict, POST /lb/pools request options
:param atomic_action: True if this is an atomic action. added
and handled by the
optional_action_timer() decorator
:returns: dict, neutron lb pool
"""
args = {"lb_method": self.LB_METHOD,
"protocol": self.LB_PROTOCOL,
"name": self.generate_random_name(),
"subnet_id": subnet_id}
args.update(pool_create_args)
return self.clients("neutron").create_pool({"pool": args})
def _create_v1_pools(self, networks, **pool_create_args):
"""Create LB pools(v1)
:param networks: list, neutron networks
:param pool_create_args: dict, POST /lb/pools request options
:returns: list, neutron lb pools
"""
subnets = []
pools = []
for net in networks:
subnets.extend(net.get("subnets", []))
with atomic.ActionTimer(self, "neutron.create_%s_pools" %
len(subnets)):
for subnet_id in subnets:
pools.append(self._create_lb_pool(
subnet_id, atomic_action=False, **pool_create_args))
return pools
@atomic.action_timer("neutron.list_pools")
def _list_v1_pools(self, **kwargs):
"""Return user lb pool list(v1)."""
return self.clients("neutron").list_pools(**kwargs)
@atomic.action_timer("neutron.delete_pool")
def _delete_v1_pool(self, pool):
"""Delete neutron pool.
:param pool: Pool object
"""
self.clients("neutron").delete_pool(pool["id"])
@atomic.action_timer("neutron.update_pool")
def _update_v1_pool(self, pool, **pool_update_args):
"""Update pool.
This atomic function updates the pool with pool_update_args.
:param pool: Pool object
:param pool_update_args: dict, POST /lb/pools update options
:returns: updated neutron pool dict
"""
pool_update_args["name"] = self.generate_random_name()
body = {"pool": pool_update_args}
return self.clients("neutron").update_pool(pool["pool"]["id"], body)
def _create_v1_vip(self, pool, **vip_create_args):
"""Create VIP(v1)
        :param pool: dict, neutron lb-pool
        :param vip_create_args: dict, POST /lb/vips request options
:returns: dict, neutron lb vip
"""
args = {"protocol": self.LB_PROTOCOL,
"protocol_port": self.LB_PROTOCOL_PORT,
"name": self.generate_random_name(),
"pool_id": pool["pool"]["id"],
"subnet_id": pool["pool"]["subnet_id"]}
args.update(vip_create_args)
return self.clients("neutron").create_vip({"vip": args})
@atomic.action_timer("neutron.list_vips")
def _list_v1_vips(self, **kwargs):
"""Return user lb vip list(v1)."""
return self.clients("neutron").list_vips(**kwargs)
@atomic.action_timer("neutron.delete_vip")
def _delete_v1_vip(self, vip):
"""Delete neutron vip.
:param vip: neutron Virtual IP object
"""
self.clients("neutron").delete_vip(vip["id"])
@atomic.action_timer("neutron.update_vip")
def _update_v1_vip(self, vip, **vip_update_args):
"""Updates vip.
This atomic function updates vip name and admin state
:param vip: Vip object
:param vip_update_args: dict, POST /lb/vips update options
:returns: updated neutron vip dict
"""
vip_update_args["name"] = self.generate_random_name()
body = {"vip": vip_update_args}
return self.clients("neutron").update_vip(vip["vip"]["id"], body)
@atomic.action_timer("neutron.create_floating_ip")
def _create_floatingip(self, floating_network, **floating_ip_args):
"""Create floating IP with floating_network.
param: floating_network: str, external network to create floating IP
param: floating_ip_args: dict, POST /floatingips create options
returns: dict, neutron floating IP
"""
floating_network_id = self._get_network_id(
floating_network)
args = {"floating_network_id": floating_network_id}
args.update(floating_ip_args)
return self.clients("neutron").create_floatingip({"floatingip": args})
@atomic.action_timer("neutron.list_floating_ips")
def _list_floating_ips(self, **kwargs):
"""Return floating IPs list."""
return self.clients("neutron").list_floatingips(**kwargs)
@atomic.action_timer("neutron.delete_floating_ip")
def _delete_floating_ip(self, floating_ip):
"""Delete floating IP.
:param: dict, floating IP object
"""
return self.clients("neutron").delete_floatingip(floating_ip["id"])
@atomic.optional_action_timer("neutron.create_healthmonitor")
def _create_v1_healthmonitor(self, **healthmonitor_create_args):
"""Create LB healthmonitor.
This atomic function creates healthmonitor with the provided
healthmonitor_create_args.
:param atomic_action: True if this is an atomic action. added
and handled by the
optional_action_timer() decorator
:param healthmonitor_create_args: dict, POST /lb/healthmonitors
:returns: neutron healthmonitor dict
"""
args = {"type": self.HM_TYPE,
"delay": self.HM_DELAY,
"max_retries": self.HM_MAX_RETRIES,
"timeout": self.HM_TIMEOUT}
args.update(healthmonitor_create_args)
return self.clients("neutron").create_health_monitor(
{"health_monitor": args})
@atomic.action_timer("neutron.list_healthmonitors")
def _list_v1_healthmonitors(self, **kwargs):
"""List LB healthmonitors.
        This atomic function lists all healthmonitors.
:param kwargs: optional parameters
:returns: neutron lb healthmonitor list
"""
return self.clients("neutron").list_health_monitors(**kwargs)
@atomic.action_timer("neutron.delete_healthmonitor")
def _delete_v1_healthmonitor(self, healthmonitor):
"""Delete neutron healthmonitor.
:param healthmonitor: neutron healthmonitor dict
"""
self.clients("neutron").delete_health_monitor(healthmonitor["id"])
@atomic.action_timer("neutron.update_healthmonitor")
def _update_v1_healthmonitor(self, healthmonitor,
**healthmonitor_update_args):
"""Update neutron healthmonitor.
:param healthmonitor: neutron lb healthmonitor dict
:param healthmonitor_update_args: POST /lb/healthmonitors
update options
:returns: updated neutron lb healthmonitor dict
"""
body = {"health_monitor": healthmonitor_update_args}
return self.clients("neutron").update_health_monitor(
healthmonitor["health_monitor"]["id"], body)
@atomic.action_timer("neutron.create_security_group")
def _create_security_group(self, **security_group_create_args):
"""Create Neutron security-group.
param: security_group_create_args: dict, POST /v2.0/security-groups
request options
return: dict, neutron security-group
"""
security_group_create_args["name"] = self.generate_random_name()
return self.clients("neutron").create_security_group(
{"security_group": security_group_create_args})
@atomic.action_timer("neutron.delete_security_group")
def _delete_security_group(self, security_group):
"""Delete Neutron security group.
param: security_group: dict, neutron security_group
"""
return self.clients("neutron").delete_security_group(
security_group["security_group"]["id"])
@atomic.action_timer("neutron.list_security_groups")
def _list_security_groups(self, **kwargs):
"""Return list of Neutron security groups."""
return self.clients("neutron").list_security_groups(**kwargs)
@atomic.action_timer("neutron.update_security_group")
def _update_security_group(self, security_group,
**security_group_update_args):
"""Update Neutron security-group.
param: security_group: dict, neutron security_group
param: security_group_update_args: dict, POST /v2.0/security-groups
update options
return: dict, updated neutron security-group
"""
security_group_update_args["name"] = self.generate_random_name()
body = {"security_group": security_group_update_args}
return self.clients("neutron").update_security_group(
security_group["security_group"]["id"], body)
|
apache-2.0
| -1,047,974,654,636,210,200
| 39.40292
| 96
| 0.602544
| false
|
mpaf/pywinauto-64bit
|
pywinauto/handleprops.py
|
1
|
12146
|
# GUI Application automation and testing library
# Copyright (C) 2006 Mark Mc Mahon
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# as published by the Free Software Foundation; either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 59 Temple Place,
# Suite 330,
# Boston, MA 02111-1307 USA
"""Functions to retrieve properties from a window handle
These are implemented in a procedural way so as to be
useful to other modules with the least conceptual overhead
"""
__revision__ = "$Revision: 727 $"
import ctypes
import win32functions
import win32defines
import win32structures
import findwindows # for children
#=========================================================================
def text(handle):
"Return the text of the window"
length = ctypes.c_long()
win32functions.SendMessageTimeout(
handle,
win32defines.WM_GETTEXTLENGTH,
0,
0,
win32defines.SMTO_ABORTIFHUNG,
100, # .1 of a second
ctypes.byref(length))
length = length.value
textval = ''
if length:
length += 1
buffer_ = ctypes.create_unicode_buffer(length)
ret = win32functions.SendMessage(
handle, win32defines.WM_GETTEXT, length, ctypes.byref(buffer_))
if ret:
textval = buffer_.value
return textval
#=========================================================================
def classname(handle):
"Return the class name of the window"
class_name = (ctypes.c_wchar * 257)()
win32functions.GetClassName (handle, ctypes.byref(class_name), 256)
return class_name.value
#=========================================================================
def parent(handle):
"Return the handle of the parent of the window"
return win32functions.GetParent(handle)
#=========================================================================
def style(handle):
"Return the style of the window"
return win32functions.GetWindowLong (handle, win32defines.GWL_STYLE)
#=========================================================================
def exstyle(handle):
"Return the extended style of the window"
return win32functions.GetWindowLong (handle, win32defines.GWL_EXSTYLE)
#=========================================================================
def controlid(handle):
"Return the ID of the control"
return win32functions.GetWindowLong (handle, win32defines.GWL_ID)
#=========================================================================
def userdata(handle):
"Return the value of any userdata associated with the window"
return win32functions.GetWindowLong (handle, win32defines.GWL_USERDATA)
#=========================================================================
def contexthelpid(handle):
"Return the context help id of the window"
return win32functions.GetWindowContextHelpId (handle)
#=========================================================================
def iswindow(handle):
"Return True if the handle is a window"
return bool(win32functions.IsWindow(handle))
#=========================================================================
def isvisible(handle):
"Return True if the window is visible"
return bool(win32functions.IsWindowVisible(handle))
#=========================================================================
def isunicode(handle):
"Teturn True if the window is a unicode window"
return bool(win32functions.IsWindowUnicode(handle))
#=========================================================================
def isenabled(handle):
"Return True if the window is enabled"
return bool(win32functions.IsWindowEnabled(handle))
#=========================================================================
def clientrect(handle):
"Return the client rectangle of the control"
client_rect = win32structures.RECT()
win32functions.GetClientRect(handle, ctypes.byref(client_rect))
return client_rect
#=========================================================================
def rectangle(handle):
"Return the rectangle of the window"
rect = win32structures.RECT()
win32functions.GetWindowRect(handle, ctypes.byref(rect))
return rect
#=========================================================================
def font(handle):
"Return the font as a LOGFONTW of the window"
# get the font handle
font_handle = win32functions.SendMessage(
handle, win32defines.WM_GETFONT, 0, 0)
    # if the font handle is 0 then the control is using the
    # system font (well, probably not - even though that is what the docs say);
    # instead we switch to the default GUI font - which is more likely correct.
if not font_handle:
# So just get the default system font
font_handle = win32functions.GetStockObject(win32defines.DEFAULT_GUI_FONT)
# if we still don't have a font!
# ----- ie, we're on an antiquated OS, like NT 3.51
if not font_handle:
# ----- On Asian platforms, ANSI font won't show.
if win32functions.GetSystemMetrics(win32defines.SM_DBCSENABLED):
# ----- was...(SYSTEM_FONT)
font_handle = win32functions.GetStockObject(
win32defines.SYSTEM_FONT)
else:
# ----- was...(SYSTEM_FONT)
font_handle = win32functions.GetStockObject(
win32defines.ANSI_VAR_FONT)
# Get the Logfont structure of the font of the control
fontval = win32structures.LOGFONTW()
ret = win32functions.GetObject(
font_handle, ctypes.sizeof(fontval), ctypes.byref(fontval))
# The function could not get the font - this is probably
# because the control does not have associated Font/Text
# So we should make sure the elements of the font are zeroed.
if not ret:
fontval = win32structures.LOGFONTW()
# if it is a main window
if is_toplevel_window(handle):
if "MS Shell Dlg" in fontval.lfFaceName or \
fontval.lfFaceName == "System":
            # these are not usually the fonts actually used for
            # title bars so we need to get the default title bar font
# get the title font based on the system metrics rather
# than the font of the control itself
ncms = win32structures.NONCLIENTMETRICSW()
ncms.cbSize = ctypes.sizeof(ncms)
win32functions.SystemParametersInfo(
win32defines.SPI_GETNONCLIENTMETRICS,
ctypes.sizeof(ncms),
ctypes.byref(ncms),
0)
            # with either of the following 2 flags set, the font of the
            # dialog is the small one (but there is normally no difference!)
if has_style(handle, win32defines.WS_EX_TOOLWINDOW) or \
has_style(handle, win32defines.WS_EX_PALETTEWINDOW):
fontval = ncms.lfSmCaptionFont
else:
fontval = ncms.lfCaptionFont
return fontval
#=========================================================================
def processid(handle):
"Retrun the ID of process that controls this window"
process_id = ctypes.c_int()
win32functions.GetWindowThreadProcessId(handle, ctypes.byref(process_id))
return process_id.value
#=========================================================================
def children(handle):
"Return a list of handles to the children of this window"
return findwindows.enum_child_windows(handle)
#=========================================================================
def has_style(handle, tocheck):
"Return True if the control has style tocheck"
hwnd_style = style(handle)
return tocheck & hwnd_style == tocheck
#=========================================================================
def has_exstyle(handle, tocheck):
"Return True if the control has extended style tocheck"
hwnd_exstyle = exstyle(handle)
return tocheck & hwnd_exstyle == tocheck
#=========================================================================
def is_toplevel_window(handle):
"Return whether the window is a top level window or not"
# only request the style once - this is an optimization over calling
    # style(handle) for each style I want to check!
style_ = style(handle)
if (style_ & win32defines.WS_OVERLAPPED == win32defines.WS_OVERLAPPED or \
style_ & win32defines.WS_CAPTION == win32defines.WS_CAPTION) and \
not (style_ & win32defines.WS_CHILD == win32defines.WS_CHILD):
return True
else:
return False
#=========================================================================
#def get_button_friendlyclassname(handle):
# "Return the friendly class name of a button control"
#
# # get the least significant bit
# style_lsb = style(handle) & 0xF
#
# # default to "Button"
# f_classname = "Button"
#
# if style_lsb == win32defines.BS_3STATE or \
# style_lsb == win32defines.BS_AUTO3STATE or \
# style_lsb == win32defines.BS_AUTOCHECKBOX or \
# style_lsb == win32defines.BS_CHECKBOX:
# f_classname = "CheckBox"
#
# elif style_lsb == win32defines.BS_RADIOBUTTON or \
# style_lsb == win32defines.BS_AUTORADIOBUTTON:
# f_classname = "RadioButton"
#
# elif style_lsb == win32defines.BS_GROUPBOX:
# f_classname = "GroupBox"
#
# if style(handle) & win32defines.BS_PUSHLIKE:
# f_classname = "Button"
#
# return f_classname
#def friendlyclassname(handle):
# """Return the friendly class name of the window
#
# The friendly class name might be subjective, but it
# tries to be what a normal user would call a window
# rather then the windows class name for the window.
# """
#
# import warnings
# warnings.warn("handleprops.friendlyclassname() is deprecated. Please use"
# "FriendlyClassMethod() of HwndWrapper",
# DeprecationWarning)
#
# # if it's a dialog then return that
# if is_toplevel_window(handle) and classname(handle) == "#32770":
# return "Dialog"
#
# # otherwise ask the wrapper class for the friendly class name
# class_name = classname(handle)
#
# from controls import wraphandle
# info = wraphandle._find_wrapper(class_name)
#
# if info:
# return info.friendlyclassname
#
# else:
# return class_name
#
#
# # Check if the class name is in the known classes
# for cls_name, f_cls_name in _class_names.items():
#
# # OK we found it
# if re.match(cls_name, classname(handle)):
# # If it is a string then just return it
# if isinstance(f_cls_name, basestring):
# return f_cls_name
# # otherwise it is a function so call it
# else:
# return f_cls_name(handle)
#
# # unknown class - just return it's classname
# return classname(handle)
#=========================================================================
def dumpwindow(handle):
"Dump a window to a set of properties"
props = {}
for func in (
text,
classname,
rectangle,
clientrect,
style,
exstyle,
contexthelpid,
controlid,
userdata,
font,
parent,
processid,
isenabled,
isunicode,
isvisible,
children,
):
props[func.__name__] = func(handle)
return props
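#=========================================================================
# Illustrative usage sketch (not part of the original module): dump the
# properties of the desktop window. ctypes is already imported at the top of
# this module and GetDesktopWindow() comes straight from user32, so no extra
# wrapper is assumed.
if __name__ == '__main__':
    _desktop = ctypes.windll.user32.GetDesktopWindow()
    for _name, _value in dumpwindow(_desktop).items():
        print('%s: %r' % (_name, _value))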
|
lgpl-2.1
| -6,504,744,661,773,950,000
| 32.185792
| 82
| 0.571464
| false
|
yasserglez/pytiger2c
|
packages/pytiger2c/dot.py
|
1
|
2786
|
# -*- coding: utf-8 -*-
"""
Classes used to generate a Graphviz DOT file containing the abstract
syntax tree built from a Tiger program.
"""
class DotGenerator(object):
"""
    Class used to generate graphs in the Graphviz DOT format.
"""
def __init__(self):
"""
        This class is used to generate Graphviz DOT code from the
        abstract syntax tree of a Tiger program.
"""
self._nodes = []
self._edges = []
self._num_nodes = 0
def add_node(self, label):
"""
        Adds a new node to the graph currently being built.
        @type label: C{str}
        @param label: Name of the node to add.
        @rtype: C{str}
        @return: Identifier of the newly added node. This identifier can
            be used to create new edges that have this node as one of
            their endpoints, using the C{add_edge} method of this same
            class.
"""
self._num_nodes += 1
name = 'node{number}'.format(number=self._num_nodes)
code = '{name} [label="{label}"];'.format(name=name, label=label)
self._nodes.append(code)
return name
def add_edge(self, from_node, to_node):
"""
        Adds an undirected edge to the graph currently being built.
        @type from_node: C{str}
        @param from_node: String that identifies one endpoint node
            of the edge.
        @type to_node: C{str}
        @param to_node: String that identifies one endpoint node
            of the edge.
"""
template = '{from_node} -- {to_node};'
code = template.format(from_node=from_node, to_node=to_node)
self._edges.append(code)
def write(self, output_fd):
"""
        Writes the Graphviz DOT code to a file descriptor.
        @type output_fd: C{file}
        @param output_fd: File descriptor where the Graphviz DOT code
            resulting from the translation of the Tiger program described
            by the abstract syntax tree should be written.
"""
indent = ' ' * 4
output_fd.write('graph AST {\n')
output_fd.write(indent)
output_fd.write('node [shape=record];\n\n')
for node_code in self._nodes:
output_fd.write(indent)
output_fd.write(node_code)
output_fd.write('\n')
output_fd.write('\n')
for edge_code in self._edges:
output_fd.write(indent)
output_fd.write(edge_code)
output_fd.write('\n')
output_fd.write('}\n')
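# Illustrative usage sketch (not part of the original module): build a tiny
# two-node graph and write the resulting DOT code to stdout.
if __name__ == '__main__':
    import sys
    generator = DotGenerator()
    root = generator.add_node('root')
    leaf = generator.add_node('leaf')
    generator.add_edge(root, leaf)
    generator.write(sys.stdout)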
|
mit
| -712,239,282,687,048,000
| 33.575
| 74
| 0.574476
| false
|
clinton-hall/nzbToMedia
|
core/version_check.py
|
1
|
20383
|
# coding=utf-8
# Author: Nic Wolfe <nic@wolfeden.ca>
# Modified by: echel0n
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import os
import platform
import re
import shutil
import stat
import subprocess
import tarfile
import traceback
from six.moves.urllib.request import urlretrieve
import cleanup
import core
from core import github_api as github, logger
class CheckVersion(object):
"""Version checker that runs in a thread with the SB scheduler."""
def __init__(self):
self.install_type = self.find_install_type()
self.installed_version = None
self.installed_branch = None
if self.install_type == 'git':
self.updater = GitUpdateManager()
elif self.install_type == 'source':
self.updater = SourceUpdateManager()
else:
self.updater = None
def run(self):
self.check_for_new_version()
def find_install_type(self):
"""
Determine how this copy of SB was installed.
        returns: type of installation. Possible values are:
            'git': running from source using git
            'source': running from source without git
        """
        # check if there's a .git folder alongside the source
if os.path.exists(os.path.join(core.APP_ROOT, u'.git')):
install_type = 'git'
else:
install_type = 'source'
return install_type
def check_for_new_version(self, force=False):
"""
Check the internet for a newer version.
returns: bool, True for new version or False for no new version.
force: if true the VERSION_NOTIFY setting will be ignored and a check will be forced
"""
if not core.VERSION_NOTIFY and not force:
logger.log(u'Version checking is disabled, not checking for the newest version')
return False
logger.log(u'Checking if {install} needs an update'.format(install=self.install_type))
if not self.updater.need_update():
core.NEWEST_VERSION_STRING = None
logger.log(u'No update needed')
return False
self.updater.set_newest_text()
return True
def update(self):
if self.updater.need_update():
result = self.updater.update()
cleanup.clean(cleanup.FOLDER_STRUCTURE)
return result
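# Illustrative usage sketch (assumption, not from the original module): the
# surrounding application initializes the `core` settings before constructing
# the checker, e.g.
#
#   checker = CheckVersion()
#   if checker.check_for_new_version(force=True):
#       checker.update()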
class UpdateManager(object):
def get_github_repo_user(self):
return core.GIT_USER
def get_github_repo(self):
return core.GIT_REPO
def get_github_branch(self):
return core.GIT_BRANCH
class GitUpdateManager(UpdateManager):
def __init__(self):
self._git_path = self._find_working_git()
self.github_repo_user = self.get_github_repo_user()
self.github_repo = self.get_github_repo()
self.branch = self._find_git_branch()
self._cur_commit_hash = None
self._newest_commit_hash = None
self._num_commits_behind = 0
self._num_commits_ahead = 0
def _git_error(self):
logger.debug(
'Unable to find your git executable - Set git_path in your autoProcessMedia.cfg OR delete your .git folder and run from source to enable updates.')
def _find_working_git(self):
test_cmd = 'version'
if core.GIT_PATH:
main_git = '"{git}"'.format(git=core.GIT_PATH)
else:
main_git = 'git'
logger.log(u'Checking if we can use git commands: {git} {cmd}'.format
(git=main_git, cmd=test_cmd), logger.DEBUG)
output, err, exit_status = self._run_git(main_git, test_cmd)
if exit_status == 0:
logger.log(u'Using: {git}'.format(git=main_git), logger.DEBUG)
return main_git
else:
logger.log(u'Not using: {git}'.format(git=main_git), logger.DEBUG)
# trying alternatives
alternative_git = []
# osx people who start SB from launchd have a broken path, so try a hail-mary attempt for them
if platform.system().lower() == 'darwin':
alternative_git.append('/usr/local/git/bin/git')
if platform.system().lower() == 'windows':
if main_git != main_git.lower():
alternative_git.append(main_git.lower())
if alternative_git:
logger.log(u'Trying known alternative git locations', logger.DEBUG)
for cur_git in alternative_git:
logger.log(u'Checking if we can use git commands: {git} {cmd}'.format
(git=cur_git, cmd=test_cmd), logger.DEBUG)
output, err, exit_status = self._run_git(cur_git, test_cmd)
if exit_status == 0:
logger.log(u'Using: {git}'.format(git=cur_git), logger.DEBUG)
return cur_git
else:
logger.log(u'Not using: {git}'.format(git=cur_git), logger.DEBUG)
# Still haven't found a working git
logger.debug('Unable to find your git executable - '
'Set git_path in your autoProcessMedia.cfg OR '
'delete your .git folder and run from source to enable updates.')
return None
def _run_git(self, git_path, args):
output = None
err = None
if not git_path:
logger.log(u'No git specified, can\'t use git commands', logger.DEBUG)
exit_status = 1
return output, err, exit_status
cmd = '{git} {args}'.format(git=git_path, args=args)
try:
logger.log(u'Executing {cmd} with your shell in {directory}'.format
(cmd=cmd, directory=core.APP_ROOT), logger.DEBUG)
p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
shell=True, cwd=core.APP_ROOT)
output, err = p.communicate()
exit_status = p.returncode
output = output.decode('utf-8')
if output:
output = output.strip()
if core.LOG_GIT:
logger.log(u'git output: {output}'.format(output=output), logger.DEBUG)
except OSError:
logger.log(u'Command {cmd} didn\'t work'.format(cmd=cmd))
exit_status = 1
        exit_status = 128 if ('fatal:' in (output or '')) or err else exit_status
if exit_status == 0:
logger.log(u'{cmd} : returned successful'.format(cmd=cmd), logger.DEBUG)
exit_status = 0
elif core.LOG_GIT and exit_status in (1, 128):
logger.log(u'{cmd} returned : {output}'.format
(cmd=cmd, output=output), logger.DEBUG)
else:
if core.LOG_GIT:
logger.log(u'{cmd} returned : {output}, treat as error for now'.format
(cmd=cmd, output=output), logger.DEBUG)
exit_status = 1
return output, err, exit_status
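    # Illustrative sketch (not from the original module): a typical call and
    # the (output, err, exit_status) triple it returns, e.g.
    #
    #   output, err, exit_status = self._run_git(self._git_path,
    #                                            'rev-parse HEAD')
    #   # -> ('3f2a1bc...', None, 0) on success; 1 or 128 on failure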
def _find_installed_version(self):
"""
Attempt to find the currently installed version of Sick Beard.
Uses git show to get commit version.
Returns: True for success or False for failure
"""
output, err, exit_status = self._run_git(self._git_path, 'rev-parse HEAD') # @UnusedVariable
if exit_status == 0 and output:
cur_commit_hash = output.strip()
if not re.match('^[a-z0-9]+$', cur_commit_hash):
logger.log(u'Output doesn\'t look like a hash, not using it', logger.ERROR)
return False
self._cur_commit_hash = cur_commit_hash
if self._cur_commit_hash:
core.NZBTOMEDIA_VERSION = self._cur_commit_hash
return True
else:
return False
def _find_git_branch(self):
core.NZBTOMEDIA_BRANCH = self.get_github_branch()
branch_info, err, exit_status = self._run_git(self._git_path, 'symbolic-ref -q HEAD') # @UnusedVariable
if exit_status == 0 and branch_info:
branch = branch_info.strip().replace('refs/heads/', '', 1)
if branch:
core.NZBTOMEDIA_BRANCH = branch
core.GIT_BRANCH = branch
return core.GIT_BRANCH
def _check_github_for_update(self):
"""
Check Github for a new version.
Uses git commands to check if there is a newer version than
the provided commit hash. If there is a newer version it
sets _num_commits_behind.
"""
self._newest_commit_hash = None
self._num_commits_behind = 0
self._num_commits_ahead = 0
# get all new info from github
output, err, exit_status = self._run_git(self._git_path, 'fetch origin')
if not exit_status == 0:
logger.log(u'Unable to contact github, can\'t check for update', logger.ERROR)
return
# get latest commit_hash from remote
output, err, exit_status = self._run_git(self._git_path, 'rev-parse --verify --quiet \'@{upstream}\'')
if exit_status == 0 and output:
cur_commit_hash = output.strip()
if not re.match('^[a-z0-9]+$', cur_commit_hash):
logger.log(u'Output doesn\'t look like a hash, not using it', logger.DEBUG)
return
else:
self._newest_commit_hash = cur_commit_hash
else:
logger.log(u'git didn\'t return newest commit hash', logger.DEBUG)
return
# get number of commits behind and ahead (option --count not supported git < 1.7.2)
output, err, exit_status = self._run_git(self._git_path, 'rev-list --left-right \'@{upstream}\'...HEAD')
if exit_status == 0 and output:
try:
self._num_commits_behind = int(output.count('<'))
self._num_commits_ahead = int(output.count('>'))
except Exception:
logger.log(u'git didn\'t return numbers for behind and ahead, not using it', logger.DEBUG)
return
logger.log(u'cur_commit = {current} % (newest_commit)= {new}, '
u'num_commits_behind = {x}, num_commits_ahead = {y}'.format
(current=self._cur_commit_hash, new=self._newest_commit_hash,
x=self._num_commits_behind, y=self._num_commits_ahead), logger.DEBUG)
def set_newest_text(self):
if self._num_commits_ahead:
logger.log(u'Local branch is ahead of {branch}. Automatic update not possible.'.format
(branch=self.branch), logger.ERROR)
elif self._num_commits_behind:
logger.log(u'There is a newer version available (you\'re {x} commit{s} behind)'.format
(x=self._num_commits_behind, s=u's' if self._num_commits_behind > 1 else u''), logger.MESSAGE)
else:
return
def need_update(self):
if not self._find_installed_version():
logger.error('Unable to determine installed version via git, please check your logs!')
return False
if not self._cur_commit_hash:
return True
else:
try:
self._check_github_for_update()
except Exception as error:
logger.log(u'Unable to contact github, can\'t check for update: {msg!r}'.format(msg=error), logger.ERROR)
return False
if self._num_commits_behind > 0:
return True
return False
def update(self):
"""
Check git for a new version.
Calls git pull origin <branch> in order to update Sick Beard.
Returns a bool depending on the call's success.
"""
output, err, exit_status = self._run_git(self._git_path, 'pull origin {branch}'.format(branch=self.branch)) # @UnusedVariable
if exit_status == 0:
return True
return False
class SourceUpdateManager(UpdateManager):
def __init__(self):
self.github_repo_user = self.get_github_repo_user()
self.github_repo = self.get_github_repo()
self.branch = self.get_github_branch()
self._cur_commit_hash = None
self._newest_commit_hash = None
self._num_commits_behind = 0
def _find_installed_version(self):
version_file = os.path.join(core.APP_ROOT, u'version.txt')
if not os.path.isfile(version_file):
self._cur_commit_hash = None
return
try:
with open(version_file, 'r') as fp:
self._cur_commit_hash = fp.read().strip(' \n\r')
except EnvironmentError as error:
logger.log(u'Unable to open \'version.txt\': {msg}'.format(msg=error), logger.DEBUG)
if not self._cur_commit_hash:
self._cur_commit_hash = None
else:
core.NZBTOMEDIA_VERSION = self._cur_commit_hash
def need_update(self):
self._find_installed_version()
try:
self._check_github_for_update()
except Exception as error:
logger.log(u'Unable to contact github, can\'t check for update: {msg!r}'.format(msg=error), logger.ERROR)
return False
if not self._cur_commit_hash or self._num_commits_behind > 0:
return True
return False
def _check_github_for_update(self):
"""
Check Github for a new version.
Uses pygithub to ask github if there is a newer version than
the provided commit hash. If there is a newer version it sets
Sick Beard's version text.
commit_hash: hash that we're checking against
"""
self._num_commits_behind = 0
self._newest_commit_hash = None
gh = github.GitHub(self.github_repo_user, self.github_repo, self.branch)
# try to get newest commit hash and commits behind directly by comparing branch and current commit
if self._cur_commit_hash:
branch_compared = gh.compare(base=self.branch, head=self._cur_commit_hash)
if 'base_commit' in branch_compared:
self._newest_commit_hash = branch_compared['base_commit']['sha']
if 'behind_by' in branch_compared:
self._num_commits_behind = int(branch_compared['behind_by'])
# fall back and iterate over last 100 (items per page in gh_api) commits
if not self._newest_commit_hash:
for curCommit in gh.commits():
if not self._newest_commit_hash:
self._newest_commit_hash = curCommit['sha']
if not self._cur_commit_hash:
break
if curCommit['sha'] == self._cur_commit_hash:
break
# when _cur_commit_hash doesn't match anything _num_commits_behind == 100
self._num_commits_behind += 1
        logger.log(u'cur_commit = {current}, newest_commit = {new}, num_commits_behind = {x}'.format
(current=self._cur_commit_hash, new=self._newest_commit_hash, x=self._num_commits_behind), logger.DEBUG)
def set_newest_text(self):
# if we're up to date then don't set this
core.NEWEST_VERSION_STRING = None
if not self._cur_commit_hash:
logger.log(u'Unknown current version number, don\'t know if we should update or not', logger.ERROR)
elif self._num_commits_behind > 0:
logger.log(u'There is a newer version available (you\'re {x} commit{s} behind)'.format
(x=self._num_commits_behind, s=u's' if self._num_commits_behind > 1 else u''), logger.MESSAGE)
else:
return
def update(self):
"""Download and install latest source tarball from github."""
tar_download_url = 'https://github.com/{org}/{repo}/tarball/{branch}'.format(
org=self.github_repo_user, repo=self.github_repo, branch=self.branch)
version_path = os.path.join(core.APP_ROOT, u'version.txt')
try:
# prepare the update dir
sb_update_dir = os.path.join(core.APP_ROOT, u'sb-update')
if os.path.isdir(sb_update_dir):
logger.log(u'Clearing out update folder {dir} before extracting'.format(dir=sb_update_dir))
shutil.rmtree(sb_update_dir)
logger.log(u'Creating update folder {dir} before extracting'.format(dir=sb_update_dir))
os.makedirs(sb_update_dir)
# retrieve file
logger.log(u'Downloading update from {url!r}'.format(url=tar_download_url))
tar_download_path = os.path.join(sb_update_dir, u'nzbtomedia-update.tar')
urlretrieve(tar_download_url, tar_download_path)
if not os.path.isfile(tar_download_path):
logger.log(u'Unable to retrieve new version from {url}, can\'t update'.format
(url=tar_download_url), logger.ERROR)
return False
if not tarfile.is_tarfile(tar_download_path):
logger.log(u'Retrieved version from {url} is corrupt, can\'t update'.format
(url=tar_download_url), logger.ERROR)
return False
# extract to sb-update dir
logger.log(u'Extracting file {path}'.format(path=tar_download_path))
tar = tarfile.open(tar_download_path)
tar.extractall(sb_update_dir)
tar.close()
# delete .tar.gz
logger.log(u'Deleting file {path}'.format(path=tar_download_path))
os.remove(tar_download_path)
# find update dir name
update_dir_contents = [x for x in os.listdir(sb_update_dir) if
os.path.isdir(os.path.join(sb_update_dir, x))]
if len(update_dir_contents) != 1:
logger.log(u'Invalid update data, update failed: {0}'.format(update_dir_contents), logger.ERROR)
return False
content_dir = os.path.join(sb_update_dir, update_dir_contents[0])
# walk temp folder and move files to main folder
logger.log(u'Moving files from {source} to {destination}'.format
(source=content_dir, destination=core.APP_ROOT))
for dirname, _, filenames in os.walk(content_dir): # @UnusedVariable
dirname = dirname[len(content_dir) + 1:]
for curfile in filenames:
old_path = os.path.join(content_dir, dirname, curfile)
new_path = os.path.join(core.APP_ROOT, dirname, curfile)
                    # Avoid DLL access problems on WIN32/64: these files
                    # need to be updated manually, unless we find a way to
                    # release the in-memory handle first.
if curfile in ('unrar.dll', 'unrar64.dll'):
try:
os.chmod(new_path, stat.S_IWRITE)
os.remove(new_path)
os.renames(old_path, new_path)
except Exception as error:
logger.log(u'Unable to update {path}: {msg}'.format
(path=new_path, msg=error), logger.DEBUG)
os.remove(old_path) # Trash the updated file without moving in new path
continue
if os.path.isfile(new_path):
os.remove(new_path)
os.renames(old_path, new_path)
# update version.txt with commit hash
try:
with open(version_path, 'w') as ver_file:
ver_file.write(self._newest_commit_hash)
except EnvironmentError as error:
logger.log(u'Unable to write version file, update not complete: {msg}'.format
(msg=error), logger.ERROR)
return False
except Exception as error:
logger.log(u'Error while trying to update: {msg}'.format
(msg=error), logger.ERROR)
logger.log(u'Traceback: {error}'.format(error=traceback.format_exc()), logger.DEBUG)
return False
return True
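# Editor's sketch: a minimal example of how either update manager above is
# typically driven; the caller name is hypothetical and only the
# need_update()/update() interface defined in this module is assumed.
def _example_check_and_update(manager):
    # need_update() contacts github and compares commit hashes; update()
    # performs a `git pull` (GitUpdateManager) or a tarball install
    # (SourceUpdateManager).
    if manager.need_update():
        return manager.update()
    return False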
|
gpl-3.0
| 4,495,967,450,857,365,000
| 37.027985
| 159
| 0.570524
| false
|
capitalone/cloud-custodian
|
c7n/filters/vpc.py
|
1
|
10368
|
# Copyright 2016-2017 Capital One Services, LLC
# Copyright The Cloud Custodian Authors.
# SPDX-License-Identifier: Apache-2.0
from c7n.exceptions import PolicyValidationError
from c7n.utils import local_session, type_schema
from .core import Filter, ValueFilter
from .related import RelatedResourceFilter
class MatchResourceValidator:
def validate(self):
if self.data.get('match-resource'):
            self.required_keys = {'key'}
return super(MatchResourceValidator, self).validate()
class SecurityGroupFilter(MatchResourceValidator, RelatedResourceFilter):
"""Filter a resource by its associated security groups."""
schema = type_schema(
'security-group', rinherit=ValueFilter.schema,
**{'match-resource': {'type': 'boolean'},
'operator': {'enum': ['and', 'or']}})
schema_alias = True
RelatedResource = "c7n.resources.vpc.SecurityGroup"
AnnotationKey = "matched-security-groups"
class SubnetFilter(MatchResourceValidator, RelatedResourceFilter):
"""Filter a resource by its associated subnets."""
schema = type_schema(
'subnet', rinherit=ValueFilter.schema,
**{'match-resource': {'type': 'boolean'},
'operator': {'enum': ['and', 'or']}})
schema_alias = True
RelatedResource = "c7n.resources.vpc.Subnet"
AnnotationKey = "matched-subnets"
class VpcFilter(MatchResourceValidator, RelatedResourceFilter):
"""Filter a resource by its associated vpc."""
schema = type_schema(
'vpc', rinherit=ValueFilter.schema,
**{'match-resource': {'type': 'boolean'},
'operator': {'enum': ['and', 'or']}})
schema_alias = True
RelatedResource = "c7n.resources.vpc.Vpc"
AnnotationKey = "matched-vpcs"
class DefaultVpcBase(Filter):
"""Filter to resources in a default vpc."""
vpcs = None
default_vpc = None
permissions = ('ec2:DescribeVpcs',)
def match(self, vpc_id):
if self.default_vpc is None:
self.log.debug("querying default vpc %s" % vpc_id)
client = local_session(self.manager.session_factory).client('ec2')
vpcs = [v['VpcId'] for v
in client.describe_vpcs()['Vpcs']
if v['IsDefault']]
if vpcs:
self.default_vpc = vpcs.pop()
        return vpc_id == self.default_vpc
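# Editor's sketch: DefaultVpcBase is meant to be subclassed per resource
# type; the class below is hypothetical and only illustrates feeding a
# resource's vpc id into match().
class _ExampleDefaultVpc(DefaultVpcBase):
    schema = type_schema('default-vpc')
    def __call__(self, resource):
        # Assumes the resource dict exposes its vpc id under 'VpcId'.
        return self.match(resource.get('VpcId'))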
class NetworkLocation(Filter):
"""On a network attached resource, determine intersection of
security-group attributes, subnet attributes, and resource attributes.
The use case is a bit specialized, for most use cases using `subnet`
and `security-group` filters suffice. but say for example you wanted to
verify that an ec2 instance was only using subnets and security groups
with a given tag value, and that tag was not present on the resource.
:Example:
.. code-block:: yaml
policies:
- name: ec2-mismatched-sg-remove
resource: ec2
filters:
- type: network-location
compare: ["resource","security-group"]
key: "tag:TEAM_NAME"
ignore:
- "tag:TEAM_NAME": Enterprise
actions:
- type: modify-security-groups
remove: network-location
isolation-group: sg-xxxxxxxx
"""
schema = type_schema(
'network-location',
**{'missing-ok': {
'type': 'boolean',
'default': False,
'description': (
"How to handle missing keys on elements, by default this causes"
"resources to be considered not-equal")},
'match': {'type': 'string', 'enum': ['equal', 'not-equal'],
                      'default': 'not-equal'},
'compare': {
'type': 'array',
'description': (
'Which elements of network location should be considered when'
' matching.'),
'default': ['resource', 'subnet', 'security-group'],
'items': {
'enum': ['resource', 'subnet', 'security-group']}},
'key': {
'type': 'string',
'description': 'The attribute expression that should be matched on'},
'max-cardinality': {
'type': 'integer', 'default': 1,
'title': ''},
'ignore': {'type': 'array', 'items': {'type': 'object'}},
'required': ['key'],
})
schema_alias = True
permissions = ('ec2:DescribeSecurityGroups', 'ec2:DescribeSubnets')
def validate(self):
rfilters = self.manager.filter_registry.keys()
if 'subnet' not in rfilters:
raise PolicyValidationError(
"network-location requires resource subnet filter availability on %s" % (
self.manager.data))
if 'security-group' not in rfilters:
raise PolicyValidationError(
"network-location requires resource security-group filter availability on %s" % (
self.manager.data))
return self
def process(self, resources, event=None):
self.sg = self.manager.filter_registry.get('security-group')({}, self.manager)
related_sg = self.sg.get_related(resources)
self.subnet = self.manager.filter_registry.get('subnet')({}, self.manager)
related_subnet = self.subnet.get_related(resources)
self.sg_model = self.manager.get_resource_manager('security-group').get_model()
self.subnet_model = self.manager.get_resource_manager('subnet').get_model()
self.vf = self.manager.filter_registry.get('value')({}, self.manager)
# filter options
key = self.data.get('key')
self.compare = self.data.get('compare', ['subnet', 'security-group', 'resource'])
self.max_cardinality = self.data.get('max-cardinality', 1)
self.match = self.data.get('match', 'not-equal')
self.missing_ok = self.data.get('missing-ok', False)
results = []
for r in resources:
resource_sgs = self.filter_ignored(
[related_sg[sid] for sid in self.sg.get_related_ids([r])])
resource_subnets = self.filter_ignored([
related_subnet[sid] for sid in self.subnet.get_related_ids([r])])
found = self.process_resource(r, resource_sgs, resource_subnets, key)
if found:
results.append(found)
return results
def filter_ignored(self, resources):
ignores = self.data.get('ignore', ())
results = []
for r in resources:
found = False
for i in ignores:
for k, v in i.items():
if self.vf.get_resource_value(k, r) == v:
found = True
if found is True:
break
if found is True:
continue
results.append(r)
return results
def process_resource(self, r, resource_sgs, resource_subnets, key):
evaluation = []
sg_space = set()
subnet_space = set()
if 'subnet' in self.compare:
subnet_values = {
rsub[self.subnet_model.id]: self.subnet.get_resource_value(key, rsub)
for rsub in resource_subnets}
if not self.missing_ok and None in subnet_values.values():
evaluation.append({
'reason': 'SubnetLocationAbsent',
'subnets': subnet_values})
subnet_space = set(filter(None, subnet_values.values()))
if len(subnet_space) > self.max_cardinality:
evaluation.append({
'reason': 'SubnetLocationCardinality',
'subnets': subnet_values})
if 'security-group' in self.compare:
sg_values = {
rsg[self.sg_model.id]: self.sg.get_resource_value(key, rsg)
for rsg in resource_sgs}
if not self.missing_ok and None in sg_values.values():
evaluation.append({
'reason': 'SecurityGroupLocationAbsent',
'security-groups': sg_values})
sg_space = set(filter(None, sg_values.values()))
if len(sg_space) > self.max_cardinality:
evaluation.append({
'reason': 'SecurityGroupLocationCardinality',
'security-groups': sg_values})
if ('subnet' in self.compare and
'security-group' in self.compare and
sg_space != subnet_space):
evaluation.append({
'reason': 'LocationMismatch',
'subnets': subnet_values,
'security-groups': sg_values})
if 'resource' in self.compare:
r_value = self.vf.get_resource_value(key, r)
if not self.missing_ok and r_value is None:
evaluation.append({
'reason': 'ResourceLocationAbsent',
'resource': r_value})
elif 'security-group' in self.compare and resource_sgs and r_value not in sg_space:
evaluation.append({
'reason': 'ResourceLocationMismatch',
'resource': r_value,
'security-groups': sg_values})
elif 'subnet' in self.compare and resource_subnets and r_value not in subnet_space:
evaluation.append({
'reason': 'ResourceLocationMismatch',
'resource': r_value,
'subnet': subnet_values})
if 'security-group' in self.compare and resource_sgs:
mismatched_sgs = {sg_id: sg_value
for sg_id, sg_value in sg_values.items()
if sg_value != r_value}
if mismatched_sgs:
evaluation.append({
'reason': 'SecurityGroupMismatch',
'resource': r_value,
'security-groups': mismatched_sgs})
if evaluation and self.match == 'not-equal':
r['c7n:NetworkLocation'] = evaluation
return r
elif not evaluation and self.match == 'equal':
return r
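# Editor's sketch: filters in this module are constructed with (data,
# manager), as process() does internally for the subnet and security-group
# filters; the policy data below is hypothetical.
def _example_network_location(manager, resources):
    f = NetworkLocation(
        {'type': 'network-location', 'key': 'tag:TEAM_NAME'}, manager)
    f.validate()
    # Matched resources come back annotated under 'c7n:NetworkLocation'.
    return f.process(resources)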
|
apache-2.0
| -2,030,043,321,442,465,000
| 37.831461
| 97
| 0.558738
| false
|
zenodo/invenio
|
invenio/modules/deposit/types/simplerecord.py
|
1
|
3085
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from flask_login import current_user
from flask import render_template
from invenio.modules.deposit.models import DepositionType, Deposition
from invenio.modules.formatter import format_record
from invenio.modules.deposit.tasks import render_form, \
create_recid, \
prepare_sip, \
finalize_record_sip, \
upload_record_sip, \
prefill_draft, \
process_sip_metadata, \
hold_for_approval
class SimpleRecordDeposition(DepositionType):
"""Simple record submission - no support for editing nor REST API."""
workflow = [
# Pre-fill draft with values passed in from request
prefill_draft(draft_id='default'),
# Render form and wait for user to submit
render_form(draft_id='default'),
# Create the submission information package by merging form data
# from all drafts (in this case only one draft exists).
prepare_sip(),
# Process metadata to match your JSONAlchemy record model. This will
# call process_sip_metadata() on your subclass.
process_sip_metadata(),
# Reserve a new record id, so that we can provide proper feedback to
# user before the record has been uploaded.
create_recid(),
# Generate MARC based on metadata dictionary.
finalize_record_sip(is_dump=False),
# Hold the deposition for admin approval
hold_for_approval(),
# Seal the SIP and write MARCXML file and call bibupload on it
upload_record_sip(),
]
hold_for_upload = False
@classmethod
def render_completed(cls, d):
"""Page to render when deposition was successfully completed."""
ctx = dict(
deposition=d,
deposition_type=(
None if d.type.is_default() else d.type.get_identifier()
),
uuid=d.id,
my_depositions=list(Deposition.get_depositions(
current_user, type=d.type
)),
sip=d.get_latest_sip(),
format_record=format_record,
)
return render_template('deposit/completed.html', **ctx)
@classmethod
def process_sip_metadata(cls, deposition, metadata):
"""Implement this method in your subclass to process metadata prior to MARC generation."""
pass
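# Editor's sketch: concrete submission types subclass SimpleRecordDeposition
# and override process_sip_metadata(); the field mapping below is
# hypothetical and only illustrates the hook.
class _ExampleDeposition(SimpleRecordDeposition):
    @classmethod
    def process_sip_metadata(cls, deposition, metadata):
        # Normalise a form value into the record model before MARC
        # generation runs in finalize_record_sip().
        metadata.setdefault('title', 'Untitled')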
|
gpl-2.0
| -24,379,396,801,336,468
| 35.72619
| 98
| 0.667099
| false
|
mahabs/nitro
|
nssrc/com/citrix/netscaler/nitro/resource/config/vpn/vpnvserver_authenticationldappolicy_binding.py
|
1
|
14377
|
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class vpnvserver_authenticationldappolicy_binding(base_resource) :
""" Binding class showing the authenticationldappolicy that can be bound to vpnvserver.
"""
def __init__(self) :
self._policy = ""
self._priority = 0
self._acttype = 0
self._secondary = False
self._groupextraction = False
self._name = ""
self._gotopriorityexpression = ""
self._bindpoint = ""
self.___count = 0
@property
def priority(self) :
"""The priority, if any, of the VPN virtual server policy.
"""
try :
return self._priority
except Exception as e:
raise e
@priority.setter
def priority(self, priority) :
"""The priority, if any, of the VPN virtual server policy.
"""
try :
self._priority = priority
except Exception as e:
raise e
@property
def gotopriorityexpression(self) :
"""Expression or other value specifying the next policy to evaluate if the current policy evaluates to TRUE. Specify one of the following values:
* NEXT - Evaluate the policy with the next higher priority number.
* END - End policy evaluation.
* USE_INVOCATION_RESULT - Applicable if this policy invokes another policy label. If the final goto in the invoked policy label has a value of END, the evaluation stops. If the final goto is anything other than END, the current policy label performs a NEXT.
* A default syntax or classic expression that evaluates to a number.
If you specify an expression, the number to which it evaluates determines the next policy to evaluate, as follows:
* If the expression evaluates to a higher numbered priority, the policy with that priority is evaluated next.
* If the expression evaluates to the priority of the current policy, the policy with the next higher numbered priority is evaluated next.
* If the expression evaluates to a number that is larger than the largest numbered priority, policy evaluation ends.
An UNDEF event is triggered if:
* The expression is invalid.
* The expression evaluates to a priority number that is numerically lower than the current policy's priority.
* The expression evaluates to a priority number that is between the current policy's priority number (say, 30) and the highest priority number (say, 100), but does not match any configured priority number (for example, the expression evaluates to the number 85). This example assumes that the priority number increments by 10 for every successive policy, and therefore a priority number of 85 does not exist in the policy label.
"""
try :
return self._gotopriorityexpression
except Exception as e:
raise e
@gotopriorityexpression.setter
def gotopriorityexpression(self, gotopriorityexpression) :
"""Expression or other value specifying the next policy to evaluate if the current policy evaluates to TRUE. Specify one of the following values:
* NEXT - Evaluate the policy with the next higher priority number.
* END - End policy evaluation.
* USE_INVOCATION_RESULT - Applicable if this policy invokes another policy label. If the final goto in the invoked policy label has a value of END, the evaluation stops. If the final goto is anything other than END, the current policy label performs a NEXT.
* A default syntax or classic expression that evaluates to a number.
If you specify an expression, the number to which it evaluates determines the next policy to evaluate, as follows:
* If the expression evaluates to a higher numbered priority, the policy with that priority is evaluated next.
* If the expression evaluates to the priority of the current policy, the policy with the next higher numbered priority is evaluated next.
* If the expression evaluates to a number that is larger than the largest numbered priority, policy evaluation ends.
An UNDEF event is triggered if:
* The expression is invalid.
* The expression evaluates to a priority number that is numerically lower than the current policy's priority.
* The expression evaluates to a priority number that is between the current policy's priority number (say, 30) and the highest priority number (say, 100), but does not match any configured priority number (for example, the expression evaluates to the number 85). This example assumes that the priority number increments by 10 for every successive policy, and therefore a priority number of 85 does not exist in the policy label.
"""
try :
self._gotopriorityexpression = gotopriorityexpression
except Exception as e:
raise e
@property
def policy(self) :
"""The name of the policy, if any, bound to the VPN virtual server.
"""
try :
return self._policy
except Exception as e:
raise e
@policy.setter
def policy(self, policy) :
"""The name of the policy, if any, bound to the VPN virtual server.
"""
try :
self._policy = policy
except Exception as e:
raise e
@property
def groupextraction(self) :
"""Binds the authentication policy to a tertiary chain which will be used only for group extraction. The user will not authenticate against this server, and this will only be called if primary and/or secondary authentication has succeeded.
"""
try :
return self._groupextraction
except Exception as e:
raise e
@groupextraction.setter
def groupextraction(self, groupextraction) :
"""Binds the authentication policy to a tertiary chain which will be used only for group extraction. The user will not authenticate against this server, and this will only be called if primary and/or secondary authentication has succeeded.
"""
try :
self._groupextraction = groupextraction
except Exception as e:
raise e
@property
def name(self) :
"""Name of the virtual server.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
"""Name of the virtual server.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def secondary(self) :
"""Binds the authentication policy as the secondary policy to use in a two-factor configuration. A user must then authenticate not only via a primary authentication method but also via a secondary authentication method. User groups are aggregated across both. The user name must be exactly the same for both authentication methods, but they can require different passwords.
"""
try :
return self._secondary
except Exception as e:
raise e
@secondary.setter
def secondary(self, secondary) :
"""Binds the authentication policy as the secondary policy to use in a two-factor configuration. A user must then authenticate not only via a primary authentication method but also via a secondary authentication method. User groups are aggregated across both. The user name must be exactly the same for both authentication methods, but they can require different passwords.
"""
try :
self._secondary = secondary
except Exception as e:
raise e
@property
def bindpoint(self) :
"""Bind point to which to bind the policy. Applies only to rewrite and cache policies. If you do not set this parameter, the policy is bound to REQ_DEFAULT or RES_DEFAULT, depending on whether the policy rule is a response-time or a request-time expression.
"""
try :
return self._bindpoint
except Exception as e:
raise e
@bindpoint.setter
def bindpoint(self, bindpoint) :
"""Bind point to which to bind the policy. Applies only to rewrite and cache policies. If you do not set this parameter, the policy is bound to REQ_DEFAULT or RES_DEFAULT, depending on whether the policy rule is a response-time or a request-time expression.
"""
try :
self._bindpoint = bindpoint
except Exception as e:
raise e
@property
def acttype(self) :
try :
return self._acttype
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(vpnvserver_authenticationldappolicy_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.vpnvserver_authenticationldappolicy_binding
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.name) :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
try :
if resource and type(resource) is not list :
updateresource = vpnvserver_authenticationldappolicy_binding()
updateresource.name = resource.name
updateresource.policy = resource.policy
updateresource.secondary = resource.secondary
updateresource.groupextraction = resource.groupextraction
updateresource.gotopriorityexpression = resource.gotopriorityexpression
updateresource.bindpoint = resource.bindpoint
return updateresource.update_resource(client)
else :
if resource and len(resource) > 0 :
updateresources = [vpnvserver_authenticationldappolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].name = resource[i].name
updateresources[i].policy = resource[i].policy
updateresources[i].secondary = resource[i].secondary
updateresources[i].groupextraction = resource[i].groupextraction
updateresources[i].gotopriorityexpression = resource[i].gotopriorityexpression
updateresources[i].bindpoint = resource[i].bindpoint
return cls.update_bulk_request(client, updateresources)
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
try :
if resource and type(resource) is not list :
deleteresource = vpnvserver_authenticationldappolicy_binding()
deleteresource.name = resource.name
deleteresource.policy = resource.policy
deleteresource.secondary = resource.secondary
deleteresource.groupextraction = resource.groupextraction
deleteresource.bindpoint = resource.bindpoint
return deleteresource.delete_resource(client)
else :
if resource and len(resource) > 0 :
deleteresources = [vpnvserver_authenticationldappolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i].name
deleteresources[i].policy = resource[i].policy
deleteresources[i].secondary = resource[i].secondary
deleteresources[i].groupextraction = resource[i].groupextraction
deleteresources[i].bindpoint = resource[i].bindpoint
return cls.delete_bulk_request(client, deleteresources)
except Exception as e :
raise e
@classmethod
def get(cls, service, name) :
""" Use this API to fetch vpnvserver_authenticationldappolicy_binding resources.
"""
try :
obj = vpnvserver_authenticationldappolicy_binding()
obj.name = name
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, name, filter_) :
""" Use this API to fetch filtered set of vpnvserver_authenticationldappolicy_binding resources.
		Filter string should be in JSON format, e.g. "port:80,servicetype:HTTP".
"""
try :
obj = vpnvserver_authenticationldappolicy_binding()
obj.name = name
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, name) :
""" Use this API to count vpnvserver_authenticationldappolicy_binding resources configued on NetScaler.
"""
try :
obj = vpnvserver_authenticationldappolicy_binding()
obj.name = name
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, name, filter_) :
""" Use this API to count the filtered set of vpnvserver_authenticationldappolicy_binding resources.
		Filter string should be in JSON format, e.g. "port:80,servicetype:HTTP".
"""
try :
obj = vpnvserver_authenticationldappolicy_binding()
obj.name = name
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class Staaddresstype:
IPV4 = "IPV4"
IPV6 = "IPV6"
class Bindpoint:
REQUEST = "REQUEST"
RESPONSE = "RESPONSE"
ICA_REQUEST = "ICA_REQUEST"
OTHERTCP_REQUEST = "OTHERTCP_REQUEST"
class vpnvserver_authenticationldappolicy_binding_response(base_response) :
def __init__(self, length=1) :
self.vpnvserver_authenticationldappolicy_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.vpnvserver_authenticationldappolicy_binding = [vpnvserver_authenticationldappolicy_binding() for _ in range(length)]
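# Editor's sketch: typical read-side usage of the binding class above; the
# client is assumed to be an established nitro_service session, which is not
# part of this module.
def _example_fetch_bindings(client, vserver_name) :
	# get() returns the ldap policies bound to the named VPN virtual
	# server; count() returns how many such bindings exist.
	bindings = vpnvserver_authenticationldappolicy_binding.get(client, vserver_name)
	total = vpnvserver_authenticationldappolicy_binding.count(client, vserver_name)
	return (bindings, total)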
|
apache-2.0
| 3,985,986,094,343,853,000
| 39.728045
| 430
| 0.741114
| false
|
mennis/oTTo
|
src/otto/lib/ethdrvstat.py
|
1
|
10253
|
#!/usr/bin/env python
#
# Copyright (c) 2014 Coraid, Inc.
# All rights reserved.
#
# $Coraid$
#
"""
Interface to read, digest and display information regarding
AoE Targets and their corresponding system information.
"""
from os import stat, listdir, path
from stat import S_ISBLK
from pprint import pformat
import re
from time import time
from json import dumps
ETHDRV_DEVICES_FILE = "/proc/ethdrv/devices"
ETHDRV_TARGETS_FILE = "/proc/ethdrv/targets"
ETHDRV_DEV_DIR = "/dev/ethdrv"
def int2bitmask(integer):
"""
    given an integer, return a string that
represents the bits::
>>> int2bitmask(15903)
    '11111000011111'
"""
    return bin(integer)[2:] if integer >= 0 else "-" + bin(integer)[3:]
def bitmask2index(bitmask):
"""
given a string representing a bitmask
return a list of positions that are not
zero::
>>> bitmask2index('11111000011111')
    [0, 1, 2, 3, 4, 9, 10, 11, 12, 13]
"""
    rmask = reversed(str(bitmask))
    return [m.start() for m in re.finditer('1', ''.join(rmask))]
def mk_portlist(intval):
"""
Take an integer representation of a bitmask and return a list form::
    >>> mk_portlist(3)
[0,1]
:type intval: int
:return: a list of ports in bitmask
:rtype: list
"""
return bitmask2index(int2bitmask(intval))
def is_block(fpath):
"""
given an absolute path determine if it's
a block device
"""
return path.exists(fpath) and S_ISBLK(stat(fpath).st_mode)
def mk_map(name):
"""
make a map of block devices to targets using listdir
by looking for softlinks and following the reference
to determine if it's a block device.
"""
device_map = dict()
if path.exists(name):
for fname in listdir(name):
pname = path.join(name, fname)
if path.islink(pname):
realpath = path.realpath(pname)
if is_block(realpath):
device_map[fname] = path.basename(realpath)
return device_map
class AoETarget(object):
"""
A class representing an AoE Target from the perspective of
an initiator.
"""
def __init__(self, bussaddress, aoeaddress, size, serial, naa):
self.scsiaddress = bussaddress
self.target = aoeaddress
self.file = "init"
self.size = size
self.ports = set()
self.macs = list()
self.targpath = dict()
self.serial = serial
self.naa = naa
def add_mac(self, mac):
"""
add a mac address to this target
"""
self.macs.append(mac)
def add_ports(self, ports):
"""
read a line that looked like::
185.0 002590c7671e 3 1
we convert 3 into [0,1] and extend self.ports with it
"""
portlist = mk_portlist(ports)
self.ports.update(portlist)
def add_path(self, port, mac):
"""
We read a line that looked like::
185.0 002590c7671e 3 1
we converted 3 into [0,1] and then sent
{0: '00259096645f'}
to this method, add_path, which adds
00259096645f
to self.targpath[0]['address']
"""
if not self.targpath.get(port):
self.targpath[port] = [mac]
else:
self.targpath[port].append(mac)
def __repr__(self):
        state = self.file if self.file != 'init' else "init"
return pformat({'target': self.target,
'file': self.file,
'devpath': "/dev/%s" % state,
'size': self.size,
'port': self.ports,
'macs': self.macs,
'targpath': self.targpath})
class AoEStat(object):
"""
A class to manage the AoEStat data. It is a class to both
facilitate testing of itself and to be reusable in the automation library.
"""
def __init__(self, scantime=5):
self.devices_file = ETHDRV_DEVICES_FILE
self.targets_file = ETHDRV_TARGETS_FILE
self.dev_dir = ETHDRV_DEV_DIR
self.scantime = scantime
self.lastscan = None
self._devices = list()
self.debug = None
self.mk_map = mk_map
@staticmethod
def open_file(name):
"""
mockable inteface to open
"""
return open(name)
@staticmethod
def mk_map(name):
"""
mockable interface to listdir related calls
"""
device_map = dict()
if path.exists(name):
for fname in listdir(name):
pname = path.join(name, fname)
if path.islink(pname):
realpath = path.realpath(pname)
if is_block(realpath):
device_map[fname] = path.basename(realpath)
return device_map
@staticmethod
def mk_portstr(ports):
"""
given a list of ports return a string
if the list is empty return "N/A"
"""
return ",".join([str(port) for port in ports]) or 'N/A'
@property
def devices(self):
"""
return a list of AoETargets seen and processed
"""
return self._devices
def get_devices(self):
"""
device entries look like::
3:0:185:0 185.0 480.103GB
"""
fhandle = self.open_file(self.devices_file)
lines = fhandle.read().strip()
for line in lines.splitlines():
serial, naa = None, None
busaddress, aoeaddress, size = line.split()[:3]
if len(line.split()) > 3:
serial, naa = line.split()[3:5]
self.devices.append(AoETarget(busaddress, aoeaddress, size, serial, naa))
def get_targets(self):
"""
target entries look like this::
185.0 002590c7671e 3 1
185.0 (string) AoE address
002590c7671e (string) mac address
3 (bitmask) of ports that can see that mac address
1 (bool) mac is active
add the 185.0 to self.devices
add 002590c7671e to self.targpaths[0] and self.targpaths[0]
we don't currently do anything with the 'active' information
"""
fhandle = self.open_file(self.targets_file)
lines = fhandle.read().strip()
for line in lines.splitlines():
aoeaddress, mac, ports = line.split()[:3]
ports = int(ports)
for device in self.devices:
if device.target == aoeaddress:
device.add_mac(mac)
device.add_ports(ports)
portlist = mk_portlist(ports)
for port in portlist:
device.add_path(port, mac)
break
def map_devices(self):
"""
determine which AoE target backs which scsi device and
add that to the device as 'file'
if the device is partitioned we skip everything but the
base device
"""
targmap = self.mk_map(self.dev_dir)
for targ, dev in targmap.iteritems():
canary = True
targ = targ[1:]
if len(targ.split('p')) > 1:
continue
for device in self.devices:
if device.target == targ:
device.file = dev
canary = False
break
if canary:
raise Exception("couldn't find target: %s %s" % (targ, dev))
def update(self):
"""
read and process information from the filesystem and
update properties
"""
self.get_devices()
self.get_targets()
self.map_devices()
self.lastscan = time()
def output(self, json=False, paths=False):
"""
format the current state information for output
"""
if json:
data = dict()
for entry in self.devices:
# can't use __repr__ for some json lib reason
data[entry.target] = {'target': entry.target,
'file': entry.file,
'devpath': "/dev/%s" % entry.file,
'size': entry.size,
'port': self.mk_portstr(entry.ports),
'macs': ",".join(entry.macs),
'paths': entry.targpath,
'serial': entry.serial,
'naa': entry.naa,
}
return dumps(data, sort_keys=True, indent=4, separators=(',', ': '))
else:
fmtstr = "e%(target)-10s%(file)-8s%(size)+13s %(port)s\n"
output = ""
for entry in self.devices:
output += fmtstr % {'target': entry.target,
'file': entry.file,
'path': "/dev/%s" % entry.file,
'size': entry.size,
'port': self.mk_portstr(entry.ports),
'macs': ",".join(entry.macs),
}
if paths:
for port, macaddrs in entry.targpath.iteritems():
macs = ", ".join(macaddrs)
output += '{0:>12} {1:<17}\n'.format(port, macs)
return output
if __name__ == '__main__':
from signal import signal, SIGPIPE, SIG_DFL
from optparse import OptionParser
signal(SIGPIPE, SIG_DFL)
parser = OptionParser()
parser.add_option("-j", "--json",
help="Output data as json",
action="store_true")
parser.add_option("-a", "--all",
help="Display all target paths",
action="store_true")
(options, args) = parser.parse_args()
aoestat = AoEStat()
try:
aoestat.update()
except IOError:
exit(1)
print aoestat.output(json=options.json, paths=options.all),
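# Editor's sketch: programmatic (non-CLI) use of AoEStat; update() re-reads
# the /proc files, and output(json=True) yields a JSON document suitable for
# machine consumption. The caller shown here is hypothetical.
def _example_snapshot():
    stat_reader = AoEStat()
    stat_reader.update()
    return stat_reader.output(json=True)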
|
bsd-3-clause
| 1,185,178,689,856,153,900
| 27.639665
| 85
| 0.514874
| false
|
ltowarek/budget-supervisor
|
third_party/saltedge/test/test_reconnect_session_request_body_data.py
|
1
|
1050
|
# coding: utf-8
"""
Salt Edge Account Information API
API Reference for services # noqa: E501
OpenAPI spec version: 5.0.0
Contact: support@saltedge.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.reconnect_session_request_body_data import ReconnectSessionRequestBodyData # noqa: E501
from swagger_client.rest import ApiException
class TestReconnectSessionRequestBodyData(unittest.TestCase):
"""ReconnectSessionRequestBodyData unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testReconnectSessionRequestBodyData(self):
"""Test ReconnectSessionRequestBodyData"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.reconnect_session_request_body_data.ReconnectSessionRequestBodyData() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
|
mit
| -8,373,832,002,242,567,000
| 25.923077
| 123
| 0.724762
| false
|
catapult-project/catapult
|
dashboard/dashboard/email_template_test.py
|
3
|
1250
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
import unittest
from dashboard import email_template
class EmailTemplateTest(unittest.TestCase):
def testURLEncoding(self):
actual_output = email_template.GetReportPageLink(
'ABC/bot-name/abc-perf-test/passed%', '1415919839')
self.assertEquals(('https://chromeperf.appspot.com/report?masters=ABC&'
'bots=bot-name&tests=abc-perf-test%2Fpassed%25'
'&checked=passed%25%2Cpassed%25_ref%2Cref&'
'rev=1415919839'), actual_output)
actual_output_no_host = email_template.GetReportPageLink(
'ABC/bot-name/abc-perf-test/passed%',
'1415919839',
add_protocol_and_host=False)
self.assertEquals(('/report?masters=ABC&bots=bot-name&tests='
'abc-perf-test%2Fpassed%25&checked=passed%25%2C'
'passed%25_ref%2Cref&rev=1415919839'),
actual_output_no_host)
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
| 3,156,084,082,682,401,300
| 32.783784
| 75
| 0.6464
| false
|
jgagneastro/FireHose_OLD
|
3-XIDL/23-XIDL/idlspec2d/bin/putils.py
|
2
|
2033
|
#!/usr/bin/env python
import sys, os, os, subprocess
import string, imp, time, shlex
import gzip
"""
putils is a set of miscellaneous python tools.
Written by Gary Kushner (LBL). Nov 2009. Latest update April 2010.
"""
def searchPath(name, paths):
"""Search a path for a name (file, direcory, link, etc). Return the absolute
path to the found file or None"""
for path in paths:
if os.path.exists(os.path.join(path, name)):
return os.path.abspath(os.path.join(path, name))
return None
def loadModuleRaw(module):
"""import a python module using a raw file name (doesn't need to end in .py)"""
path = searchPath(module, sys.path)
if path == None:
raise ImportError("No module named " + module)
return imp.load_source(module, path)
def runCommand(cmd, echo=False, logCmd=None, prefix="", shell=False):
"""Run a command with the option to asynchronously display or log output.
If shell=False, the cmd needs to be a list, but if you pass in a string
it will be parsed into a list.
echo will echo output to stdout.
logCmd is a function pointer to use to put the output into a log.
Returns (return code, output)."""
output = ""
# Handle the command parsing
if isinstance(cmd, str) and not shell:
cmd = [c for c in shlex.split(cmd)]
# Call the process
p = subprocess.Popen(cmd, stdout = subprocess.PIPE, stderr = subprocess.STDOUT,
shell=shell)
# Process output until process dies
while True:
l = p.stdout.readline()
if not l: break
output += l
l = l[:-1] # yea, only safe on unix...
if echo:
print prefix + l
if logCmd != None:
logCmd(prefix + l)
return (p.wait(), output)
def openRead(filename, mode = "r"):
"""Open a gzip or normal file for text reading. Valid modes are 'r' and 'rb'"""
gzSig = '\x1f\x8b'
if mode != 'r' and mode != 'rb':
raise ValueError("Illegal mode: " + mode)
f = open(filename, mode)
try:
if (f.read(2) == gzSig):
f = gzip.open(filename, mode)
finally:
f.seek(0)
return f
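# Editor's sketch: combining the helpers above; the command and file name
# are hypothetical.
def _example_usage():
    rc, out = runCommand("uname -a", echo=False)
    f = openRead("catalog.dat.gz")
    try:
        first = f.readline()
    finally:
        f.close()
    return (rc, first)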
|
gpl-2.0
| -3,833,361,437,633,648,600
| 23.493976
| 81
| 0.661584
| false
|
maljac/odoo-addons
|
project_task_desc_html/__openerp__.py
|
1
|
1587
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Project Task Description in HTML',
'version': '1.0',
'category': 'Projects & Services',
'sequence': 14,
'summary': '',
'description': """
Project Task Description in HTML
================================
Changes description type on tasks to html
""",
'author': 'ADHOC SA',
'website': 'www.adhoc.com.ar',
'images': [
],
'depends': [
'project',
],
'data': [
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
| -3,380,056,052,953,167,000
| 31.387755
| 78
| 0.561437
| false
|
cnwzhjs/onemake
|
lib/job_manager.py
|
1
|
3124
|
import thread
import threading
import os.path
ALL_JOBS={}
JOBS_COUNT={
'pending': 0,
'working': 0,
'error': 0,
'done': 0,
'source': 0
}
JOBS_LOCK=thread.allocate_lock()
JOBS_COND=threading.Condition()
class Job(object):
def __init__(self, job_type, dest, depends, args=None):
self.job_type = job_type
self.dest = dest
self.depends = depends
self.args = args
self.__status = "pending"
@property
def status(self):
return self.__status
@status.setter
def status(self, v):
if v == self.__status:
return
JOBS_COUNT[self.__status] -= 1
JOBS_COUNT[v] += 1
self.__status = v
@property
def ready_to_start(self):
if self.__status != 'pending':
return False
for depend in self.depends:
if not depend:
continue
if depend.status != 'done':
return False
return True
@property
def should_compile(self):
if self.job_type == 'source_library':
return False
elif self.depends is None or not len(self.depends):
return not os.path.exists(self.dest)
else:
if not os.path.exists(self.dest):
return True
ctime = os.path.getctime(self.dest)
for depend_job in self.depends:
if depend_job is None:
continue
if os.path.exists(depend_job.dest) and os.path.getctime(depend_job.dest) > ctime:
return True
return False
def add_job(job_type, dest, depends, args=None):
if dest in ALL_JOBS:
return
job = Job(job_type, dest, depends, args)
JOBS_COUNT['pending'] += 1
ALL_JOBS[dest] = job
return job
def add_source_job(filename):
job = add_job('source', filename, [])
if job is not None:
job.status = 'done'
JOBS_COUNT['source'] += 1
return job
def add_or_lookup_source_job(filename):
return ALL_JOBS[filename] if filename in ALL_JOBS else add_source_job(filename)
def fetch_and_mark_start():
output = "wait", None
JOBS_LOCK.acquire()
if JOBS_COUNT['pending'] == 0 or JOBS_COUNT['error'] != 0:
output = "done", None
else:
for job in ALL_JOBS.values():
if job.ready_to_start:
job.status = 'working'
output = "work", job
break
JOBS_LOCK.release()
if output[0] == "wait":
JOBS_COND.acquire()
JOBS_COND.wait()
JOBS_COND.release()
return fetch_and_mark_start()
else:
return output
def __update_status(job, new_status):
JOBS_LOCK.acquire()
job.status = new_status
JOBS_LOCK.release()
JOBS_COND.acquire()
JOBS_COND.notify_all()
JOBS_COND.release()
def mark_error(job):
__update_status(job, 'error')
def mark_done(job):
__update_status(job, 'done')
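# Editor's sketch: a worker loop driving the scheduler above; the build
# callable is hypothetical, everything else is this module's API.
def _example_worker(build):
    while True:
        state, job = fetch_and_mark_start()
        if state == "done":
            break
        try:
            build(job)
            mark_done(job)
        except Exception:
            mark_error(job)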
|
bsd-2-clause
| 8,658,479,901,913,993,000
| 22.030769
| 97
| 0.529449
| false
|
NoiSek/Automata
|
main.py
|
1
|
5834
|
import random
import util
import sfml
from collections import deque
from models.automata import Automata
from models.algae import Algae
class Simulation():
def __init__(self):
self.global_vars = { "debug": True }
self.fps_enabled = True
settings = sfml.window.ContextSettings(antialiasing=2)
self.window = sfml.graphics.RenderWindow(sfml.window.VideoMode(1600, 900), "Automata", sfml.window.Style.DEFAULT, settings)
self.window.framerate_limit = 120
self.entities = deque()
self.algae_timer = sfml.system.Clock()
self.font_roboto = sfml.graphics.Font.from_file("resources/Roboto-Light.ttf")
self.global_vars['font'] = self.font_roboto
self.fps_counter = sfml.graphics.Text("0 FPS", self.font_roboto, 24)
self.fps_counter.color = sfml.graphics.Color(30, 200, 30)
self.fps_counter.position = (10, 10)
self.fps_clock = sfml.system.Clock()
for x in range(10):
self.spawn("automata")
for x in range(25):
self.spawn("algae")
def handle_events(self, local_events):
# Future animations go here
for event in local_events:
if event["type"] is "eat":
new_entities = filter(lambda x: x.id is not event['target'], self.entities)
self.entities = deque(new_entities)
elif event["type"] is "die":
new_entities = filter(lambda x: x.id is not event['target'], self.entities)
self.entities = deque(new_entities)
elif event["type"] is "mate":
checksum = lambda n: sum([ord(x) for x in n])
# Mother is whichever has the larger checksum
if checksum(event['subject']) > checksum(event['target']):
entity = list(filter(lambda x: x.id is event['subject'], self.entities))[0]
position = entity.shape.position
position.x += random.randrange(-15, 15)
position.y += random.randrange(-15, 15)
self.spawn("automata", x=position.x, y=position.y)
elif event['type'] is "grow":
entity = list(filter(lambda x: x.id is event['subject'], self.entities))
if len(entity) > 0:
entity = entity[0]
else:
# The entity has been eaten, most likely.
return
position = entity.shape.position
pos_x = position.x + random.randrange(-25, 25)
pos_y = position.y + random.randrange(-25, 25)
self.spawn("algae", x=pos_x, y=pos_y)
def listen(self):
for event in self.window.events:
if type(event) is sfml.window.CloseEvent:
self.window.close()
if type(event) is sfml.window.KeyEvent:
if sfml.window.Keyboard.is_key_pressed(sfml.window.Keyboard.ESCAPE):
self.window.close()
if sfml.window.Keyboard.is_key_pressed(sfml.window.Keyboard.SPACE):
for entity in filter(lambda x: x.type is "automata", self.entities):
entity.directional_velocity += 0.5
if sfml.window.Keyboard.is_key_pressed(sfml.window.Keyboard.D):
self.global_vars['debug'] = not self.global_vars['debug']
if sfml.window.Keyboard.is_key_pressed(sfml.window.Keyboard.F):
self.fps_enabled = not self.fps_enabled
if self.global_vars.get("debug") and sfml.window.Keyboard.is_key_pressed(sfml.window.Keyboard.RIGHT):
for entity in filter(lambda x: x.type is "automata", self.entities):
entity.shape.rotation += 1
if self.global_vars.get("debug") and sfml.window.Keyboard.is_key_pressed(sfml.window.Keyboard.LEFT):
for entity in filter(lambda x: x.type is "automata", self.entities):
entity.shape.rotation -= 1
if self.global_vars.get("debug") and sfml.window.Keyboard.is_key_pressed(sfml.window.Keyboard.Q):
for entity in filter(lambda x: x.type is "automata", self.entities):
entity.rotational_velocity -= 0.1
if self.global_vars.get("debug") and sfml.window.Keyboard.is_key_pressed(sfml.window.Keyboard.E):
for entity in filter(lambda x: x.type is "automata", self.entities):
entity.rotational_velocity += 0.1
if type(event) is sfml.window.MouseButtonEvent:
x, y = event.position
if sfml.window.Mouse.is_button_pressed(sfml.window.Mouse.LEFT):
self.spawn("automata", x=x, y=y)
print("Spawned Automata at %d, %d" % (x, y))
if sfml.window.Mouse.is_button_pressed(sfml.window.Mouse.RIGHT):
self.spawn("algae", x=x, y=y)
print("Spawned Algae at %d, %d" % (x, y))
def render(self):
self.window.clear(sfml.graphics.Color(27, 24, 77))
if self.fps_enabled:
fps = 1000000.0 / self.fps_clock.restart().microseconds
self.fps_counter.string = "%d FPS" % fps
self.window.draw(self.fps_counter)
for item in self.entities:
self.window.draw(item)
self.window.display()
def spawn(self, entity_type, x=None, y=None):
        width, height = self.window.size
        entity_id = util.gen_id()
        pos_x = x if x is not None else random.randrange(round(width * 0.1), round(width * 0.9))
        pos_y = y if y is not None else random.randrange(round(height * 0.1), round(height * 0.9))
if entity_type == "automata":
entity = Automata(entity_id, pos_x, pos_y, global_vars=self.global_vars)
elif entity_type == "algae":
entity = Algae(entity_id, pos_x, pos_y, global_vars=self.global_vars)
self.entities.append(entity)
def step(self):
local_events = []
for entity in self.entities:
response = entity.step()
local_events.extend(response)
if entity.objective in ["eat", "eat!", "mate"]:
entity.target = util.find_target(entity, self.entities)
self.handle_events(local_events)
if self.algae_timer.elapsed_time.seconds > 5:
if random.random() <= 0.5:
self.algae_timer.restart()
self.spawn("algae")
|
gpl-2.0
| -6,994,960,279,491,262,000
| 34.797546
| 127
| 0.63387
| false
|
wichert/po-push
|
src/popush/cli.py
|
1
|
1592
|
import os
import click
import polib
from . import ignore_msg
from .po import rewrite_po
from .pt import rewrite_pt
from .python import rewrite_python
REWRITERS = {
'.po': rewrite_po,
'.pt': rewrite_pt,
'.py': rewrite_python,
}
@click.command()
@click.argument('po-file', type=click.Path(exists=True))
@click.argument('sources', type=click.Path(exists=True), nargs=-1)
@click.option('--indent-only', is_flag=True)
@click.option('--sources-from-po', is_flag=True)
@click.option('-p', '--strip', type=int, default=0)
def main(po_file, sources, indent_only, sources_from_po, strip):
"""Merge translations into source files.
"""
catalog = polib.pofile(po_file)
files = set(sources)
if sources_from_po or not sources:
for msg in catalog:
if ignore_msg(msg):
continue
for oc in msg.occurrences:
path = oc[0]
if strip:
path = os.path.sep.join(path.split(os.path.sep)[strip:])
files.add(path)
warned = set()
with click.progressbar(files, label='Updating files') as bar:
for fn in bar:
if not os.path.exists(fn):
click.echo('Can not find file %s' % fn, err=True)
continue
ext = os.path.splitext(fn)[1]
rewriter = REWRITERS.get(ext)
if rewriter:
rewriter(fn, catalog, indent_only, strip)
elif ext not in warned:
click.echo('Do not know how to update %s files' % ext, err=True)
warned.add(ext)
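# Editor's sketch: exercising the click command from code via click's test
# runner; the catalog path is hypothetical and must exist for the
# click.Path(exists=True) check to pass.
def _example_invoke():
    from click.testing import CliRunner
    runner = CliRunner()
    return runner.invoke(main, ['messages.po', '--sources-from-po', '-p', '1'])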
|
bsd-2-clause
| -4,306,632,180,257,741,300
| 30.215686
| 80
| 0.579774
| false
|
wakiyamap/electrum-mona
|
electrum_mona/gui/qt/network_dialog.py
|
1
|
20468
|
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import socket
import time
from enum import IntEnum
from typing import Tuple, TYPE_CHECKING
import threading
from PyQt5.QtCore import Qt, pyqtSignal, QThread
from PyQt5.QtWidgets import (QTreeWidget, QTreeWidgetItem, QMenu, QGridLayout, QComboBox,
QLineEdit, QDialog, QVBoxLayout, QHeaderView, QCheckBox,
QTabWidget, QWidget, QLabel)
from PyQt5.QtGui import QFontMetrics
from electrum_mona.i18n import _
from electrum_mona import constants, blockchain, util
from electrum_mona.interface import ServerAddr, PREFERRED_NETWORK_PROTOCOL
from electrum_mona.network import Network
from electrum_mona.logging import get_logger
from .util import (Buttons, CloseButton, HelpButton, read_QIcon, char_width_in_lineedit,
PasswordLineEdit)
if TYPE_CHECKING:
from electrum_mona.simple_config import SimpleConfig
_logger = get_logger(__name__)
protocol_names = ['TCP', 'SSL']
protocol_letters = 'ts'
class NetworkDialog(QDialog):
def __init__(self, *, network: Network, config: 'SimpleConfig', network_updated_signal_obj):
QDialog.__init__(self)
self.setWindowTitle(_('Network'))
self.setMinimumSize(500, 500)
self.nlayout = NetworkChoiceLayout(network, config)
self.network_updated_signal_obj = network_updated_signal_obj
vbox = QVBoxLayout(self)
vbox.addLayout(self.nlayout.layout())
vbox.addLayout(Buttons(CloseButton(self)))
self.network_updated_signal_obj.network_updated_signal.connect(
self.on_update)
util.register_callback(self.on_network, ['network_updated'])
self._cleaned_up = False
def on_network(self, event, *args):
signal_obj = self.network_updated_signal_obj
if signal_obj:
signal_obj.network_updated_signal.emit(event, args)
def on_update(self):
self.nlayout.update()
def clean_up(self):
if self._cleaned_up:
return
self._cleaned_up = True
self.nlayout.clean_up()
self.network_updated_signal_obj.network_updated_signal.disconnect()
self.network_updated_signal_obj = None
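# Editor's sketch: how the dialog is typically wired up; the signal object
# is assumed to expose the `network_updated_signal` used above, and the
# surrounding Qt application setup is omitted.
def _example_open_dialog(network, config, signal_obj):
    dlg = NetworkDialog(network=network, config=config,
                        network_updated_signal_obj=signal_obj)
    dlg.exec_()
    dlg.clean_up()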
class NodesListWidget(QTreeWidget):
"""List of connected servers."""
SERVER_ADDR_ROLE = Qt.UserRole + 100
CHAIN_ID_ROLE = Qt.UserRole + 101
ITEMTYPE_ROLE = Qt.UserRole + 102
class ItemType(IntEnum):
CHAIN = 0
CONNECTED_SERVER = 1
DISCONNECTED_SERVER = 2
TOPLEVEL = 3
def __init__(self, parent):
QTreeWidget.__init__(self)
self.parent = parent # type: NetworkChoiceLayout
self.setHeaderLabels([_('Server'), _('Height')])
self.setContextMenuPolicy(Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self.create_menu)
def create_menu(self, position):
item = self.currentItem()
if not item:
return
item_type = item.data(0, self.ITEMTYPE_ROLE)
menu = QMenu()
if item_type == self.ItemType.CONNECTED_SERVER:
server = item.data(0, self.SERVER_ADDR_ROLE) # type: ServerAddr
menu.addAction(_("Use as server"), lambda: self.parent.follow_server(server))
elif item_type == self.ItemType.DISCONNECTED_SERVER:
server = item.data(0, self.SERVER_ADDR_ROLE) # type: ServerAddr
def func():
self.parent.server_e.setText(server.net_addr_str())
self.parent.set_server()
menu.addAction(_("Use as server"), func)
elif item_type == self.ItemType.CHAIN:
chain_id = item.data(0, self.CHAIN_ID_ROLE)
menu.addAction(_("Follow this branch"), lambda: self.parent.follow_branch(chain_id))
else:
return
menu.exec_(self.viewport().mapToGlobal(position))
def keyPressEvent(self, event):
if event.key() in [Qt.Key_F2, Qt.Key_Return, Qt.Key_Enter]:
self.on_activated(self.currentItem(), self.currentColumn())
else:
QTreeWidget.keyPressEvent(self, event)
def on_activated(self, item, column):
# on 'enter' we show the menu
pt = self.visualItemRect(item).bottomLeft()
pt.setX(50)
self.customContextMenuRequested.emit(pt)
def update(self, *, network: Network, servers: dict, use_tor: bool):
self.clear()
# connected servers
connected_servers_item = QTreeWidgetItem([_("Connected nodes"), ''])
connected_servers_item.setData(0, self.ITEMTYPE_ROLE, self.ItemType.TOPLEVEL)
chains = network.get_blockchains()
n_chains = len(chains)
for chain_id, interfaces in chains.items():
b = blockchain.blockchains.get(chain_id)
if b is None: continue
name = b.get_name()
if n_chains > 1:
x = QTreeWidgetItem([name + '@%d'%b.get_max_forkpoint(), '%d'%b.height()])
x.setData(0, self.ITEMTYPE_ROLE, self.ItemType.CHAIN)
x.setData(0, self.CHAIN_ID_ROLE, b.get_id())
else:
x = connected_servers_item
for i in interfaces:
star = ' *' if i == network.interface else ''
item = QTreeWidgetItem([f"{i.server.to_friendly_name()}" + star, '%d'%i.tip])
item.setData(0, self.ITEMTYPE_ROLE, self.ItemType.CONNECTED_SERVER)
item.setData(0, self.SERVER_ADDR_ROLE, i.server)
item.setToolTip(0, str(i.server))
x.addChild(item)
if n_chains > 1:
connected_servers_item.addChild(x)
# disconnected servers
disconnected_servers_item = QTreeWidgetItem([_("Other known servers"), ""])
disconnected_servers_item.setData(0, self.ITEMTYPE_ROLE, self.ItemType.TOPLEVEL)
connected_hosts = set([iface.host for ifaces in chains.values() for iface in ifaces])
protocol = PREFERRED_NETWORK_PROTOCOL
for _host, d in sorted(servers.items()):
if _host in connected_hosts:
continue
if _host.endswith('.onion') and not use_tor:
continue
port = d.get(protocol)
if port:
server = ServerAddr(_host, port, protocol=protocol)
item = QTreeWidgetItem([server.net_addr_str(), ""])
item.setData(0, self.ITEMTYPE_ROLE, self.ItemType.DISCONNECTED_SERVER)
item.setData(0, self.SERVER_ADDR_ROLE, server)
disconnected_servers_item.addChild(item)
self.addTopLevelItem(connected_servers_item)
self.addTopLevelItem(disconnected_servers_item)
connected_servers_item.setExpanded(True)
for i in range(connected_servers_item.childCount()):
connected_servers_item.child(i).setExpanded(True)
disconnected_servers_item.setExpanded(True)
# headers
h = self.header()
h.setStretchLastSection(False)
h.setSectionResizeMode(0, QHeaderView.Stretch)
h.setSectionResizeMode(1, QHeaderView.ResizeToContents)
super().update()
class NetworkChoiceLayout(object):
def __init__(self, network: Network, config: 'SimpleConfig', wizard=False):
self.network = network
self.config = config
self.tor_proxy = None
self.tabs = tabs = QTabWidget()
proxy_tab = QWidget()
blockchain_tab = QWidget()
tabs.addTab(blockchain_tab, _('Overview'))
tabs.addTab(proxy_tab, _('Proxy'))
fixed_width_hostname = 24 * char_width_in_lineedit()
fixed_width_port = 6 * char_width_in_lineedit()
# Proxy tab
grid = QGridLayout(proxy_tab)
grid.setSpacing(8)
# proxy setting
self.proxy_cb = QCheckBox(_('Use proxy'))
self.proxy_cb.clicked.connect(self.check_disable_proxy)
self.proxy_cb.clicked.connect(self.set_proxy)
self.proxy_mode = QComboBox()
self.proxy_mode.addItems(['SOCKS4', 'SOCKS5'])
self.proxy_host = QLineEdit()
self.proxy_host.setFixedWidth(fixed_width_hostname)
self.proxy_port = QLineEdit()
self.proxy_port.setFixedWidth(fixed_width_port)
self.proxy_user = QLineEdit()
self.proxy_user.setPlaceholderText(_("Proxy user"))
self.proxy_password = PasswordLineEdit()
self.proxy_password.setPlaceholderText(_("Password"))
self.proxy_password.setFixedWidth(fixed_width_port)
self.proxy_mode.currentIndexChanged.connect(self.set_proxy)
self.proxy_host.editingFinished.connect(self.set_proxy)
self.proxy_port.editingFinished.connect(self.set_proxy)
self.proxy_user.editingFinished.connect(self.set_proxy)
self.proxy_password.editingFinished.connect(self.set_proxy)
self.proxy_mode.currentIndexChanged.connect(self.proxy_settings_changed)
self.proxy_host.textEdited.connect(self.proxy_settings_changed)
self.proxy_port.textEdited.connect(self.proxy_settings_changed)
self.proxy_user.textEdited.connect(self.proxy_settings_changed)
self.proxy_password.textEdited.connect(self.proxy_settings_changed)
self.tor_cb = QCheckBox(_("Use Tor Proxy"))
self.tor_cb.setIcon(read_QIcon("tor_logo.png"))
self.tor_cb.hide()
self.tor_cb.clicked.connect(self.use_tor_proxy)
grid.addWidget(self.tor_cb, 1, 0, 1, 3)
grid.addWidget(self.proxy_cb, 2, 0, 1, 3)
grid.addWidget(HelpButton(_('Proxy settings apply to all connections: with Electrum servers, but also with third-party services.')), 2, 4)
grid.addWidget(self.proxy_mode, 4, 1)
grid.addWidget(self.proxy_host, 4, 2)
grid.addWidget(self.proxy_port, 4, 3)
grid.addWidget(self.proxy_user, 5, 2)
grid.addWidget(self.proxy_password, 5, 3)
grid.setRowStretch(7, 1)
# Blockchain Tab
grid = QGridLayout(blockchain_tab)
msg = ' '.join([
_("Electrum connects to several nodes in order to download block headers and find out the longest blockchain."),
_("This blockchain is used to verify the transactions sent by your transaction server.")
])
self.status_label = QLabel('')
grid.addWidget(QLabel(_('Status') + ':'), 0, 0)
grid.addWidget(self.status_label, 0, 1, 1, 3)
grid.addWidget(HelpButton(msg), 0, 4)
self.autoconnect_cb = QCheckBox(_('Select server automatically'))
self.autoconnect_cb.setEnabled(self.config.is_modifiable('auto_connect'))
self.autoconnect_cb.clicked.connect(self.set_server)
self.autoconnect_cb.clicked.connect(self.update)
msg = ' '.join([
_("If auto-connect is enabled, Electrum will always use a server that is on the longest blockchain."),
_("If it is disabled, you have to choose a server you want to use. Electrum will warn you if your server is lagging.")
])
grid.addWidget(self.autoconnect_cb, 1, 0, 1, 3)
grid.addWidget(HelpButton(msg), 1, 4)
self.server_e = QLineEdit()
self.server_e.setFixedWidth(fixed_width_hostname + fixed_width_port)
self.server_e.editingFinished.connect(self.set_server)
msg = _("Electrum sends your wallet addresses to a single server, in order to receive your transaction history.")
grid.addWidget(QLabel(_('Server') + ':'), 2, 0)
grid.addWidget(self.server_e, 2, 1, 1, 3)
grid.addWidget(HelpButton(msg), 2, 4)
self.height_label = QLabel('')
msg = _('This is the height of your local copy of the blockchain.')
grid.addWidget(QLabel(_('Blockchain') + ':'), 3, 0)
grid.addWidget(self.height_label, 3, 1)
grid.addWidget(HelpButton(msg), 3, 4)
self.split_label = QLabel('')
grid.addWidget(self.split_label, 4, 0, 1, 3)
self.nodes_list_widget = NodesListWidget(self)
grid.addWidget(self.nodes_list_widget, 6, 0, 1, 5)
vbox = QVBoxLayout()
vbox.addWidget(tabs)
self.layout_ = vbox
# tor detector
self.td = td = TorDetector()
td.found_proxy.connect(self.suggest_proxy)
td.start()
self.fill_in_proxy_settings()
self.update()
def clean_up(self):
if self.td:
self.td.found_proxy.disconnect()
self.td.stop()
self.td = None
def check_disable_proxy(self, b):
if not self.config.is_modifiable('proxy'):
b = False
for w in [self.proxy_mode, self.proxy_host, self.proxy_port, self.proxy_user, self.proxy_password]:
w.setEnabled(b)
def enable_set_server(self):
if self.config.is_modifiable('server'):
enabled = not self.autoconnect_cb.isChecked()
self.server_e.setEnabled(enabled)
else:
for w in [self.autoconnect_cb, self.server_e, self.nodes_list_widget]:
w.setEnabled(False)
def update(self):
net_params = self.network.get_parameters()
server = net_params.server
auto_connect = net_params.auto_connect
if not self.server_e.hasFocus():
self.server_e.setText(server.to_friendly_name())
self.autoconnect_cb.setChecked(auto_connect)
height_str = "%d "%(self.network.get_local_height()) + _('blocks')
self.height_label.setText(height_str)
n = len(self.network.get_interfaces())
status = _("Connected to {0} nodes.").format(n) if n > 1 else _("Connected to {0} node.").format(n) if n == 1 else _("Not connected")
self.status_label.setText(status)
chains = self.network.get_blockchains()
if len(chains) > 1:
chain = self.network.blockchain()
forkpoint = chain.get_max_forkpoint()
name = chain.get_name()
msg = _('Chain split detected at block {0}').format(forkpoint) + '\n'
            msg += (_('You are following branch') if auto_connect else _('Your server is on branch')) + ' ' + name
msg += ' (%d %s)' % (chain.get_branch_size(), _('blocks'))
else:
msg = ''
self.split_label.setText(msg)
self.nodes_list_widget.update(network=self.network,
servers=self.network.get_servers(),
use_tor=self.tor_cb.isChecked())
self.enable_set_server()
def fill_in_proxy_settings(self):
proxy_config = self.network.get_parameters().proxy
if not proxy_config:
proxy_config = {"mode": "none", "host": "localhost", "port": "9050"}
b = proxy_config.get('mode') != "none"
self.check_disable_proxy(b)
if b:
self.proxy_cb.setChecked(True)
self.proxy_mode.setCurrentIndex(
self.proxy_mode.findText(str(proxy_config.get("mode").upper())))
self.proxy_host.setText(proxy_config.get("host"))
self.proxy_port.setText(proxy_config.get("port"))
self.proxy_user.setText(proxy_config.get("user", ""))
self.proxy_password.setText(proxy_config.get("password", ""))
def layout(self):
return self.layout_
def follow_branch(self, chain_id):
self.network.run_from_another_thread(self.network.follow_chain_given_id(chain_id))
self.update()
def follow_server(self, server: ServerAddr):
self.network.run_from_another_thread(self.network.follow_chain_given_server(server))
self.update()
def accept(self):
pass
def set_server(self):
net_params = self.network.get_parameters()
try:
server = ServerAddr.from_str_with_inference(str(self.server_e.text()))
if not server: raise Exception("failed to parse")
except Exception:
return
net_params = net_params._replace(server=server,
auto_connect=self.autoconnect_cb.isChecked())
self.network.run_from_another_thread(self.network.set_parameters(net_params))
def set_proxy(self):
net_params = self.network.get_parameters()
if self.proxy_cb.isChecked():
proxy = {'mode':str(self.proxy_mode.currentText()).lower(),
'host':str(self.proxy_host.text()),
'port':str(self.proxy_port.text()),
'user':str(self.proxy_user.text()),
'password':str(self.proxy_password.text())}
else:
proxy = None
self.tor_cb.setChecked(False)
net_params = net_params._replace(proxy=proxy)
self.network.run_from_another_thread(self.network.set_parameters(net_params))
def suggest_proxy(self, found_proxy):
if found_proxy is None:
self.tor_cb.hide()
return
self.tor_proxy = found_proxy
self.tor_cb.setText("Use Tor proxy at port " + str(found_proxy[1]))
if (self.proxy_cb.isChecked()
and self.proxy_mode.currentIndex() == self.proxy_mode.findText('SOCKS5')
and self.proxy_host.text() == "127.0.0.1"
and self.proxy_port.text() == str(found_proxy[1])):
self.tor_cb.setChecked(True)
self.tor_cb.show()
def use_tor_proxy(self, use_it):
if not use_it:
self.proxy_cb.setChecked(False)
else:
socks5_mode_index = self.proxy_mode.findText('SOCKS5')
if socks5_mode_index == -1:
_logger.info("can't find proxy_mode 'SOCKS5'")
return
self.proxy_mode.setCurrentIndex(socks5_mode_index)
self.proxy_host.setText("127.0.0.1")
self.proxy_port.setText(str(self.tor_proxy[1]))
self.proxy_user.setText("")
self.proxy_password.setText("")
self.tor_cb.setChecked(True)
self.proxy_cb.setChecked(True)
self.check_disable_proxy(use_it)
self.set_proxy()
def proxy_settings_changed(self):
self.tor_cb.setChecked(False)
class TorDetector(QThread):
found_proxy = pyqtSignal(object)
def __init__(self):
QThread.__init__(self)
self._stop_event = threading.Event()
def run(self):
        # Ports Tor commonly listens on: 9050 (system daemon), 9150 (Tor Browser)
ports = [9050, 9150]
while True:
for p in ports:
net_addr = ("127.0.0.1", p)
if TorDetector.is_tor_port(net_addr):
self.found_proxy.emit(net_addr)
break
else:
self.found_proxy.emit(None)
stopping = self._stop_event.wait(10)
if stopping:
return
def stop(self):
self._stop_event.set()
self.exit()
self.wait()
@staticmethod
def is_tor_port(net_addr: Tuple[str, int]) -> bool:
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.settimeout(0.1)
s.connect(net_addr)
# Tor responds uniquely to HTTP-like requests
s.send(b"GET\n")
if b"Tor is not an HTTP Proxy" in s.recv(1024):
return True
except socket.error:
pass
return False
|
mit
| 6,902,632,499,620,850,000
| 39.854291
| 146
| 0.61325
| false
|
cloudmesh/cmd3light
|
setup.py
|
1
|
6580
|
#!/usr/bin/env python
# ----------------------------------------------------------------------- #
# Copyright 2008-2010, Gregor von Laszewski #
# Copyright 2010-2013, Indiana University #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.#
# See the License for the specific language governing permissions and #
# limitations under the License. #
# ------------------------------------------------------------------------#
from __future__ import print_function
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
from setuptools.command.install import install
import os
import shutil
import sys
import platform
try:
import cloudmesh_base
print ("Using cloudmesh_base version:", cloudmesh_base.__version__)
except ImportError:
# os.system("pip install cloudmesh_base")
os.system("pip install git+https://github.com/cloudmesh/base.git")
from cloudmesh_base.util import banner
from cloudmesh_base.setup import os_execute, get_version_from_git
from cloudmesh_cmd3light import __version__
banner("Installing Cloudmesh_cmd3light {:}".format(__version__))
requirements = ['pyreadline<=1.7.1.dev-r0',
'colorama',
'cloudmesh_base',
'future',
'docopt',
'pyaml',
'simplejson',
'python-hostlist',
'prettytable',
'sqlalchemy',
'urllib3',
'requests',
'sandman',
'gitchangelog',
'six']
class UploadToPypi(install):
"""Upload the package to pypi. -- only for Maintainers."""
description = __doc__
def run(self):
os.system("make clean")
commands = """
python setup.py install
python setup.py bdist_wheel upload
python setup.py sdist --format=bztar,zip upload
"""
os_execute(commands)
class InstallBase(install):
"""Install the cloudmesh package."""
description = __doc__
def run(self):
banner("Install readline")
commands = None
this_platform = platform.system().lower()
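        # readline needs a platform-specific backend: the GNU readline wrapper
        # on macOS and pyreadline on Windows; Linux is assumed to work as-is.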
if this_platform in ['darwin']:
commands = """
easy_install readline
"""
elif this_platform in ['windows']:
commands = """
pip install pyreadline
"""
if commands:
os_execute(commands)
import cloudmesh_cmd3light
banner("Install Cloudmesh_cmd3light {:}".format(__version__))
install.run(self)
def read(fname):
    # use a context manager so the file handle is closed promptly
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
home = os.path.expanduser("~")
#home + '/.cloudmesh'
#print [ (home + '/.cloudmesh/' + d, [os.path.join(d, f) for f in files]) for d, folders, files in os.walk('etc')],
#sys.exit()
#data_files= [ (os.path.join(home, '.cloudmesh'),
# [os.path.join(d, f) for f in files]) for d, folders, files in os.walk(
# os.path.join('cloudmesh_cmd3light', 'etc'))]
import fnmatch
import os
#matches = []
#for root, dirnames, filenames in os.walk(os.path.join('cloudmesh_cmd3light', 'etc')):
# for filename in fnmatch.filter(filenames, '*'):
# matches.append(os.path.join(root, filename).lstrip('cloudmesh_cmd3light/'))
#data_dirs = matches
# Hack because for some reason requirements does not work
#
# os.system("pip install -r requirements.txt")
class Tox(TestCommand):
user_options = [('tox-args=', 'a', "Arguments to pass to tox")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.tox_args = None
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
#import here, cause outside the eggs aren't loaded
import tox
import shlex
args = self.tox_args
if args:
args = shlex.split(self.tox_args)
errno = tox.cmdline(args=args)
sys.exit(errno)
APP = [os.path.join('cloudmesh_cmd3light', 'shell.py')]
OPTIONS = {'argv_emulation': True}
setup(
# setup_requires=['py2app'],
# options={'py2app': OPTIONS},
# app=APP,
version=__version__,
name="cloudmesh_cmd3light",
description="cloudmesh_cmd3light - A dynamic CMD shell with plugins",
long_description=read('README.rst'),
license="Apache License, Version 2.0",
author="Gregor von Laszewski",
author_email="laszewski@gmail.com",
url="https://github.com/cloudmesh/cloudmesh_cmd3light",
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Intended Audience :: Science/Research",
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 2.7",
"Topic :: Scientific/Engineering",
"Topic :: System :: Clustering",
"Topic :: System :: Distributed Computing",
"Topic :: Software Development :: Libraries :: Python Modules",
"Environment :: Console"
],
keywords="cloud cmd commandshell plugins",
packages=find_packages(),
install_requires=requirements,
include_package_data=True,
# data_files= data_files,
# package_data={'cloudmesh_cmd3light': data_dirs},
entry_points={
'console_scripts': [
'cml = cloudmesh_cmd3light.cm:main',
],
},
tests_require=['tox'],
cmdclass={
'install': InstallBase,
'pypi': UploadToPypi,
'test': Tox
},
dependency_links = []
)
|
apache-2.0
| -2,251,012,706,917,116,200
| 33.270833
| 115
| 0.566109
| false
|
keflavich/pyspeckit-obsolete
|
pyspeckit/spectrum/models/lorentzian.py
|
1
|
2204
|
"""
=================
Lorentzian Fitter
=================
"""
import numpy
from numpy.ma import median
from numpy import pi
from pyspeckit.mpfit import mpfit
from . import fitter
class LorentzianFitter(fitter.SimpleFitter):
def __init__(self,multisingle='multi'):
self.npars = 3
self.npeaks = 1
self.onepeaklorentzfit = self._fourparfitter(self.onepeaklorentzian)
if multisingle in ('multi','single'):
self.multisingle = multisingle
else:
raise Exception("multisingle must be multi or single")
def __call__(self,*args,**kwargs):
if self.multisingle == 'single':
return self.onepeaklorentzfit(*args,**kwargs)
elif self.multisingle == 'multi':
return self.multilorentzfit(*args,**kwargs)
def onedlorentzian(x,H,A,dx,w):
"""
        Returns a 1-dimensional lorentzian of form
        H + A/(2*pi) * w/((x-dx)**2 + (w/2.0)**2)
"""
return H+A/(2*pi)*w/((x-dx)**2 + (w/2.0)**2)
def n_lorentzian(pars=None,a=None,dx=None,width=None):
"""
Returns a function that sums over N lorentzians, where N is the length of
        a,dx,width *OR* N = len(pars) / 3
The background "height" is assumed to be zero (you must "baseline" your
spectrum before fitting)
        pars - a list with len(pars) = 3n, assuming a,dx,width repeated
dx - offset (velocity center) values
width - line widths (Lorentzian FWHM)
a - amplitudes
"""
        if pars is not None and len(pars) % 3 == 0:
a = [pars[ii] for ii in xrange(0,len(pars),3)]
dx = [pars[ii] for ii in xrange(1,len(pars),3)]
width = [pars[ii] for ii in xrange(2,len(pars),3)]
elif not(len(dx) == len(width) == len(a)):
raise ValueError("Wrong array lengths! dx: %i width %i a: %i" % (len(dx),len(width),len(a)))
def L(x):
v = numpy.zeros(len(x))
for i in range(len(dx)):
                # the original used a bare `w` (a NameError) and the whole `dx`
                # list; each component needs its own centroid and width
                v += a[i] / (2*pi) * width[i] / ((x-dx[i])**2 + (width[i]/2.0)**2)
return v
return L
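    # Illustrative usage (hypothetical values) for a two-component model:
    #   model = n_lorentzian(pars=[1.0, 0.0, 2.0, 0.5, 5.0, 1.0])
    #   y = model(numpy.linspace(-10, 10, 200))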
def multilorentzfit(self):
"""
not implemented
"""
print "Not implemented"
|
mit
| 5,442,105,374,143,549,000
| 31.411765
| 106
| 0.548548
| false
|
Kaftanov/Cchat
|
chat-server/server.py
|
1
|
7250
|
#!/usr/bin/env python3
"""
#############################
Server application || TCP, socket
Python version: python3
#############################
"""
import select
import signal
import socket
import sys
import uuid
import datetime
from communication import send, receive
from messages import Messages
from dbworker import DbHandler
from cmdworker import Commands
class Server:
"""
    Chat server state and behaviour.
    Attributes:
    listen_count: socket backlog -- the maximum number of queued connections
    serv_host: address the server binds to
    serv_port: server's port
    command_list: special server commands available to users,
        e.g. ['/online', '/info', ...]
    command_string: string which contains a command
    user_list: list of connected client sockets
    user_dict: nested dict which looks like: {'sid_value': {
        'login': .., 'first_name': .., 'second_name': .., 'password': ..,
        'hostname': ..}, ..}
    socket_sid_dict: maps each session id value (sid_value) to its socket
    Methods:
    __init__
        info: initialize the server socket
        variables: listen_count, serv_host, serv_port
    sighandler
        info: shut down the server, closing all sockets
        variables: none
    serve
        info: main server loop
        variables: none
    exec_commands
        info: execute commands from 'command_list'
        variables: command_string
    validation_user
        info: check that the user's password is valid
        variables: dict with key ['password']
    broadcast_message
        info: send a message to every socket in self.user_list
        variables: a text string
    get_sid
        info: get the session id for a socket
        variables: socket
"""
def __init__(self, listen_count=None, serv_host=None, serv_port=None):
if listen_count is None:
listen_count = 5
if serv_host is None:
serv_host = 'localhost'
if serv_port is None:
serv_port = 3490
# set server messages worker
self.MsgWorker = Messages(host=serv_host, port=serv_port, backlog=listen_count)
# set data base worker
self.DbWorker = DbHandler()
# set command worker
self.CmdWorker = Commands()
self.uid_link = {}
self.user_list = []
self.server_password = 'qwerty'
# initialize server socket
self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.server.bind((serv_host, serv_port))
self.server.listen(listen_count)
print(self.MsgWorker.welcome_string())
# set signal handler
signal.signal(signal.SIGINT, self.sighandler)
def sighandler(self, signum, frame):
""" Shutdown the server if typing Ctrl + C """
for sock in self.user_list:
sock.close()
self.server.close()
sys.exit('Shutting down server...')
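    # Reuse the stored uid for a known login; otherwise mint a fresh UUID4.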
def generate_uid(self, login):
uid = self.DbWorker.get_uid_by_login(login)
return uid if uid else str(uuid.uuid4())
def authenticate_user(self, data):
try:
login = data['login']
password = data['password']
uid = self.generate_uid(login)
if data['type'] == 'log':
if password == self.DbWorker.get_passwd_by_login(login):
self.DbWorker.update_state(uid=uid, state=1, date='NULL')
else:
return False,
elif data['type'] == 'reg':
user_form = {'uid': uid, 'login': login, 'password': password,
'state': 1, 'left': 'NULL'}
self.DbWorker.add_user(user_form)
else:
return False,
message = self.MsgWorker.print_new_user(login)
return True, uid, message
except KeyError as error:
print(error)
return False,
def broadcast_message(self, message, sockt=None):
""" Broadcast messages for all users"""
if sockt is None:
for sock in self.user_list:
send(sock, message)
else:
for sock in self.user_list:
if sock is not sockt:
send(sock, message)
def run_server_loop(self):
input_socket_list = [self.server]
is_running = True
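        # Multiplex the listening socket and all client sockets with select();
        # a readable socket is either a new connection or incoming client data.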
while is_running:
try:
in_fds, out_fds, err_fds = select.select(input_socket_list,
self.user_list, [])
except select.error as error:
print(error)
break
except socket.error as error:
print(error)
break
for sock in in_fds:
if sock is self.server:
user, user_address = self.server.accept()
data = receive(user)
request = self.authenticate_user(data)
if request[0]:
message = request[2]
self.broadcast_message(message)
self.uid_link[user] = request[1]
input_socket_list.append(user)
self.user_list.append(user)
send(user, 'Success')
else:
send(user, 'Error')
continue
else:
try:
data = receive(sock)
if data:
print(data)
if data in self.CmdWorker.command_list:
send(sock, self.CmdWorker.execute_commands(data))
else:
user = self.DbWorker.get_user(self.uid_link[sock])['login']
head = '%s~%s' % (user, self.MsgWorker.time())
message = data
self.broadcast_message({'head': head, 'message': message}, sock)
else:
                            left_time = self.CmdWorker.time()
                            self.DbWorker.update_state(self.uid_link[sock], 0, left_time)
sock.close()
input_socket_list.remove(sock)
self.user_list.remove(sock)
message = self.MsgWorker.print_user_left(self.DbWorker.get_user(
self.uid_link[sock])['login'])
self.broadcast_message(message)
except socket.error as error:
print(error)
input_socket_list.remove(sock)
self.user_list.remove(sock)
self.server.close()
if __name__ == "__main__":
Server().run_server_loop()
|
gpl-3.0
| -5,659,754,009,381,891,000
| 36.371134
| 96
| 0.495448
| false
|
readw/210CT-Coursework
|
Basic-Py/6-Reverse.py
|
1
|
3321
|
# Week 3 - 6) Write the pseudocode and code for a function that reverses the
#             words in a sentence. Input: "This is awesome" Output: "awesome
#             is This". Give the Big O notation.
'''
PSEUDOCODE - ITERATIVE
----------------------
REVERSE_ORDER(s)
rev <- SPLIT s BY " "
reversed <- ""
    FOR i IN COUNTDOWN (length of rev - 1) TO 0
IF i != 0
reversed += rev[i]+" "
ELSE
reversed += rev[i]
RETURN reversed
PSEUDOCODE - RECURSIVE
----------------------
REVERSE_ORDER(s, length)
IF length = 0
RETURN s[0]
ELSE
RETURN REVERSE_ORDER(s[0-length:], length-1) & " " & s[0]
'''
#######################
## Iterative Version ##
#######################
def reverseOrderIter(string):
    '''Iterative Solution that reverses the order of all separate words within
a passed string.''' # Example: n=3
# Set reversed string as an empty string.
reversedString = "" # O(1) --> O(1)
# Loop through the list in reverse order.
for i in range(len(string)-1,-1,-1): # O(n) --> O(3)
# If it isn't the last value in the array.
if i != 0: # O(n) --> O(3)
# Append reversedString with the value and a space.
reversedString += string[i]+" " # O(n) --> O(3)
# If it is the last value in the array.
else: # O(n) --> O(3)
            # Append reversedString with the word.
reversedString += string[i] # O(n) --> O(3)
return reversedString # O(1) --> O(1)
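# Illustrative check: reverseOrderIter(["This", "is", "awesome"]) -> "awesome is This"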
#######################
## Recursive Version ##
#######################
def reverseOrderRec(sentence, length):
    '''Recursive Solution that reverses the order of all separate words within
a passed string.''' # Example: n=3
# If the length of the string array is 0.
if length == 0: # O(1) --> O(1)
# Return back the selected value.
return sentence[0] # O(1) --> O(1)
else: # O(1) --> O(1)
# Call the function passing the array back and taking 1 from the length.
return reverseOrderRec(sentence[0-length:], length-1)+" "+sentence[0] # O(n) --> O(3)
if __name__ == "__main__":
while True:
try:
            # User inputs a phrase and it is split into a list of words.
            sentence = input("Please enter a sentence: ").split(" ")
            # Calls iterative reverse order function.
            print("Result (Iterative): "+reverseOrderIter(sentence))
            # Calls recursive reverse order function.
            print("Result (Recursive): "+reverseOrderRec(sentence,len(sentence)-1))
        except (EOFError, KeyboardInterrupt):
break
|
gpl-3.0
| 7,979,030,324,996,267,000
| 44.493151
| 100
| 0.428184
| false
|
darrencheng0817/AlgorithmLearning
|
Python/leetcode/MergeKLists.py
|
1
|
1442
|
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
class Solution(object):
def mergeKLists(self, lists):
"""
:type lists: List[ListNode]
:rtype: ListNode
"""
        from heapq import heappop, heapreplace, heapify
        res = ListNode(0)
        p = res
        # (val, index, node): the index breaks ties between equal values so
        # heapq never compares ListNode objects directly (TypeError in Python 3)
        h = [(n.val, i, n) for i, n in enumerate(lists) if n]
        heapify(h)
        while h:
            value, i, minNode = h[0]
            p.next = minNode
            if not minNode.next:
                heappop(h)
            else:
                # replace the heap top with the successor of the taken node
                heapreplace(h, (minNode.next.val, i, minNode.next))
            p = p.next
        return res.next
def mergeKLists2(self, lists):
"""
:type lists: List[ListNode]
:rtype: ListNode
"""
        from heapq import heappush, heappop, heapify
        h = []
        res = ListNode(0)
        p = res
        # same tie-breaking index trick as above
        for i, n in enumerate(lists):
            if n:
                h.append((n.val, i, n))
        heapify(h)
        while h:
            value, i, minNode = heappop(h)
            p.next = minNode
            if minNode.next:
                heappush(h, (minNode.next.val, i, minNode.next))
            p = p.next
        return res.next
so=Solution()
l1=ListNode(3)
l1.next=ListNode(5)
l1.next.next=ListNode(6)
l2=ListNode(7)
l2.next=ListNode(9)
input=[l1,l2]
res=so.mergeKLists2(input)
while res:
print(res.val)
res=res.next
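# Both variants run in O(N log k) time for N total nodes across k lists,
# using O(k) extra space for the heap.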
|
mit
| -99,459,296,128,431,540
| 22.639344
| 65
| 0.511789
| false
|
miguelinux/vbox
|
src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/UPT/Xml/ModuleSurfaceAreaXml.py
|
1
|
37012
|
## @file
# This file is used to parse a Module file of .PKG file
#
# Copyright (c) 2011 - 2014, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials are licensed and made available
# under the terms and conditions of the BSD License which accompanies this
# distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
'''
ModuleSurfaceAreaXml
'''
from xml.dom import minidom
from Library.String import ConvertNEToNOTEQ
from Library.String import ConvertNOTEQToNE
from Library.String import GetStringOfList
from Library.String import IsMatchArch
from Library.Xml.XmlRoutines import XmlElement
from Library.Xml.XmlRoutines import XmlAttribute
from Library.Xml.XmlRoutines import XmlNode
from Library.Xml.XmlRoutines import XmlList
from Library.Xml.XmlRoutines import CreateXmlElement
from Object.POM.CommonObject import GuidVersionObject
from Object.POM.ModuleObject import BootModeObject
from Object.POM.ModuleObject import DepexObject
from Object.POM.ModuleObject import ModuleObject
from Object.POM.ModuleObject import EventObject
from Object.POM.ModuleObject import HobObject
from Object.POM.ModuleObject import SourceFileObject
from Object.POM.ModuleObject import PackageDependencyObject
from Object.POM.ModuleObject import ExternObject
from Object.POM.ModuleObject import BinaryFileObject
from Object.POM.ModuleObject import AsBuiltObject
from Object.POM.ModuleObject import BinaryBuildFlagObject
from Xml.CommonXml import ClonedFromXml
from Xml.CommonXml import HeaderXml
from Xml.CommonXml import HelpTextXml
from Xml.CommonXml import CommonDefinesXml
from Xml.CommonXml import LibraryClassXml
from Xml.CommonXml import UserExtensionsXml
from Xml.CommonXml import MiscellaneousFileXml
from Xml.CommonXml import FilenameXml
from Xml.GuidProtocolPpiXml import GuidXml
from Xml.GuidProtocolPpiXml import ProtocolXml
from Xml.GuidProtocolPpiXml import PpiXml
from Xml.PcdXml import PcdEntryXml
from Xml.XmlParserMisc import GetHelpTextList
from Library import GlobalData
from Library.Misc import GetSplitValueList
## BinaryFileXml
#
# represent the following XML item
#
# <BinaryFile>
# <Filename
# FileType=" FileType " {1}
# SupArchList=" ArchListType " {0,1}
# FeatureFlag=" FeatureFlagExpression " {0,1} >
# xs:anyURI
# </Filename> {1,}
# <AsBuilt> ... </AsBuilt> {0,}
# </BinaryFile> {1,}
#
class BinaryFileXml(object):
def __init__(self):
self.FileNames = []
self.AsBuiltList = []
self.PatchPcdValues = ''
self.PcdExValues = ''
self.LibraryInstances = ''
self.BuildFlags = ''
def FromXml(self, Item, Key):
if self.FileNames:
pass
BinaryFile = BinaryFileObject()
FilenameList = []
SupArchList = ['COMMON']
for SubItem in XmlList(Item, '%s/Filename' % Key):
Axml = FilenameXml()
Bxml = Axml.FromXml(SubItem, 'Filename')
FilenameList.append(Bxml)
BinaryFile.SetFileNameList(FilenameList)
for FileName in FilenameList:
if FileName.GetSupArchList():
SupArchList = FileName.GetSupArchList()
BinaryFile.SetSupArchList(SupArchList)
if GlobalData.gIS_BINARY_INF:
AsBuiltList = []
for AsBuiltItem in XmlList(Item, '%s/AsBuilt' % Key):
AsBuilt = AsBuiltObject()
PatchPcdValueList = []
for SubItem in XmlList(AsBuiltItem, 'AsBuilt/PatchPcdValue'):
Axml = PcdEntryXml()
Bxml = Axml.FromXml(SubItem, 'PatchPcdValue')
PatchPcdValueList.append(Bxml)
AsBuilt.SetPatchPcdList(PatchPcdValueList)
PcdExValueList = []
for SubItem in XmlList(AsBuiltItem, 'AsBuilt/PcdExValue'):
Axml = PcdEntryXml()
Bxml = Axml.FromXml(SubItem, 'PcdExValue')
PcdExValueList.append(Bxml)
AsBuilt.SetPcdExList(PcdExValueList)
LibraryList = []
for SubItem in XmlList(Item, '%s/AsBuilt/LibraryInstances/GUID' % Key):
GuidVerObj = GuidVersionObject()
GUID = XmlElement(SubItem, 'GUID')
Version = XmlAttribute(XmlNode(SubItem, 'GUID'), 'Version')
GuidVerObj.SetGuid(GUID)
GuidVerObj.SetVersion(Version)
LibraryList.append(GuidVerObj)
if XmlList(Item, '%s/AsBuilt/LibraryInstances' % Key) and not LibraryList:
LibraryList = [None]
AsBuilt.SetLibraryInstancesList(LibraryList)
BuildFlagList = []
for SubItem in XmlList(Item, '%s/AsBuilt/BuildFlags' % Key):
BuildFlag = BuildFlagXml()
BuildFlagList.append(BuildFlag.FromXml2(SubItem, 'BuildFlags'))
AsBuilt.SetBuildFlagsList(BuildFlagList)
AsBuiltList.append(AsBuilt)
BinaryFile.SetAsBuiltList(AsBuiltList)
return BinaryFile
def ToXml(self, BinaryFile, Key):
if self.FileNames:
pass
NodeList = []
FilenameList = BinaryFile.GetFileNameList()
SupportArch = None
for Filename in FilenameList:
Tmp = FilenameXml()
NodeList.append(Tmp.ToXml(Filename, 'Filename'))
SupportArch = Filename.SupArchList
AsBuildList = BinaryFile.GetAsBuiltList()
PatchPcdValueList = AsBuildList.GetPatchPcdList()
PcdExList = AsBuildList.GetPcdExList()
LibGuidVerList = AsBuildList.GetLibraryInstancesList()
BuildFlagList = AsBuildList.GetBuildFlagsList()
AsBuiltNodeList = []
for Pcd in PatchPcdValueList:
if IsMatchArch(Pcd.SupArchList, SupportArch):
Tmp = PcdEntryXml()
AsBuiltNodeList.append(Tmp.ToXml4(Pcd, 'PatchPcdValue'))
for Pcd in PcdExList:
if IsMatchArch(Pcd.SupArchList, SupportArch):
Tmp = PcdEntryXml()
AsBuiltNodeList.append(Tmp.ToXml4(Pcd, 'PcdExValue'))
GuiVerElemList = []
for LibGuidVer in LibGuidVerList:
if LibGuidVer.GetLibGuid() and IsMatchArch(LibGuidVer.GetSupArchList(), SupportArch):
GuiVerElem = \
CreateXmlElement('GUID', LibGuidVer.GetLibGuid(), [], [['Version', LibGuidVer.GetLibVersion()]])
GuiVerElemList.append(GuiVerElem)
if len(GuiVerElemList) > 0:
LibGuidVerElem = CreateXmlElement('LibraryInstances', '', GuiVerElemList, [])
AsBuiltNodeList.append(LibGuidVerElem)
for BuildFlag in BuildFlagList:
if IsMatchArch(BuildFlag.GetSupArchList(), SupportArch):
for Item in BuildFlag.GetAsBuildList():
                    Elem = CreateXmlElement('BuildFlags', ''.join(Item), [], [])
AsBuiltNodeList.append(Elem)
if len(AsBuiltNodeList) > 0:
Element = CreateXmlElement('AsBuilt', '', AsBuiltNodeList, [])
NodeList.append(Element)
Root = CreateXmlElement('%s' % Key, '', NodeList, [])
return Root
def __str__(self):
Str = "BinaryFiles:"
for Item in self.FileNames:
Str = Str + '\n\t' + str(Item)
for Item in self.PatchPcdValues:
Str = Str + '\n\t' + str(Item)
for Item in self.PcdExValues:
Str = Str + '\n\t' + str(Item)
for Item in self.LibraryInstances:
Str = Str + '\n\t' + str(Item)
for Item in self.BuildFlags:
Str = Str + '\n\t' + str(Item)
return Str
##
# PackageXml
#
class PackageXml(object):
def __init__(self):
self.Description = ''
self.Guid = ''
self.Version = ''
self.CommonDefines = CommonDefinesXml()
def FromXml(self, Item, Key):
self.Description = XmlElement(Item, '%s/Description' % Key)
self.Guid = XmlElement(Item, '%s/GUID' % Key)
self.Version = XmlAttribute(XmlNode(Item, '%s/GUID' % Key), 'Version')
self.CommonDefines.FromXml(XmlNode(Item, '%s' % Key), Key)
PackageDependency = PackageDependencyObject()
PackageDependency.SetPackage(self.Description)
PackageDependency.SetGuid(self.Guid)
PackageDependency.SetVersion(self.Version)
PackageDependency.SetFeatureFlag(ConvertNOTEQToNE(self.CommonDefines.FeatureFlag))
PackageDependency.SetSupArchList(self.CommonDefines.SupArchList)
return PackageDependency
def ToXml(self, PackageDependency, Key):
if self.Guid:
pass
AttributeList = [['SupArchList', GetStringOfList(PackageDependency.GetSupArchList())],
['FeatureFlag', ConvertNEToNOTEQ(PackageDependency.GetFeatureFlag())], ]
Element1 = CreateXmlElement('GUID', PackageDependency.GetGuid(), [],
[['Version', PackageDependency.GetVersion()]])
NodeList = [['Description', PackageDependency.GetPackage()], Element1, ]
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
Str = "Description = %s Guid = %s Version = %s %s" \
% (self.Description, self.Guid, self.Version, self.CommonDefines)
return Str
##
# ExternXml
#
class ExternXml(object):
def __init__(self):
self.CommonDefines = CommonDefinesXml()
self.EntryPoint = ''
self.UnloadImage = ''
self.Constructor = ''
self.Destructor = ''
self.SupModList = ''
self.SupArchList = ''
self.HelpText = []
def FromXml(self, Item, Key):
self.CommonDefines.FromXml(Item, Key)
self.EntryPoint = XmlElement(Item, '%s/EntryPoint' % Key)
self.UnloadImage = XmlElement(Item, '%s/UnloadImage' % Key)
self.Constructor = XmlElement(Item, '%s/Constructor' % Key)
self.Destructor = XmlElement(Item, '%s/Destructor' % Key)
Extern = ExternObject()
Extern.SetEntryPoint(self.EntryPoint)
Extern.SetUnloadImage(self.UnloadImage)
Extern.SetConstructor(self.Constructor)
Extern.SetDestructor(self.Destructor)
if self.CommonDefines.SupModList:
Extern.SetSupModList(self.CommonDefines.SupModList)
if self.CommonDefines.SupArchList:
Extern.SetSupArchList(self.CommonDefines.SupArchList)
return Extern
def ToXml(self, Extern, Key):
if self.HelpText:
pass
NodeList = []
if Extern.GetEntryPoint():
NodeList.append(['EntryPoint', Extern.GetEntryPoint()])
if Extern.GetUnloadImage():
NodeList.append(['UnloadImage', Extern.GetUnloadImage()])
if Extern.GetConstructor():
NodeList.append(['Constructor', Extern.GetConstructor()])
if Extern.GetDestructor():
NodeList.append(['Destructor', Extern.GetDestructor()])
Root = CreateXmlElement('%s' % Key, '', NodeList, [])
return Root
def __str__(self):
Str = "EntryPoint = %s UnloadImage = %s Constructor = %s Destructor = %s %s" \
% (self.EntryPoint, self.UnloadImage, self.Constructor, self.Destructor, self.CommonDefines)
for Item in self.HelpText:
Str = Str + '\n\t' + str(Item)
return Str
##
# DepexXml
#
class DepexXml(object):
def __init__(self):
self.CommonDefines = CommonDefinesXml()
self.Expression = None
self.HelpText = []
def FromXml(self, Item, Key):
if not Item:
return None
self.CommonDefines.FromXml(Item, Key)
self.Expression = XmlElement(Item, '%s/Expression' % Key)
for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
HelpTextObj = HelpTextXml()
HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
self.HelpText.append(HelpTextObj)
Depex = DepexObject()
Depex.SetDepex(self.Expression)
Depex.SetModuleType(self.CommonDefines.SupModList)
Depex.SetSupArchList(self.CommonDefines.SupArchList)
Depex.SetFeatureFlag(self.CommonDefines.FeatureFlag)
Depex.SetHelpTextList(GetHelpTextList(self.HelpText))
return Depex
def ToXml(self, Depex, Key):
if self.HelpText:
pass
AttributeList = [['SupArchList', GetStringOfList(Depex.GetSupArchList())],
['SupModList', Depex.GetModuleType()]]
NodeList = [['Expression', Depex.GetDepex()]]
if Depex.GetHelpText():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Depex.GetHelpText(), 'HelpText'))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
Str = "Expression = %s" % (self.Expression)
for Item in self.HelpText:
Str = Str + '\n\t' + str(Item)
return Str
##
# BootModeXml
#
class BootModeXml(object):
def __init__(self):
self.SupportedBootModes = ''
self.CommonDefines = CommonDefinesXml()
self.HelpText = []
def FromXml(self, Item, Key):
self.SupportedBootModes = \
XmlElement(Item, '%s/SupportedBootModes' % Key)
self.CommonDefines.FromXml(Item, Key)
for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
HelpTextObj = HelpTextXml()
HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
self.HelpText.append(HelpTextObj)
BootMode = BootModeObject()
BootMode.SetSupportedBootModes(self.SupportedBootModes)
BootMode.SetUsage(self.CommonDefines.Usage)
BootMode.SetHelpTextList(GetHelpTextList(self.HelpText))
return BootMode
def ToXml(self, BootMode, Key):
if self.HelpText:
pass
AttributeList = [['Usage', BootMode.GetUsage()], ]
NodeList = [['SupportedBootModes', BootMode.GetSupportedBootModes()]]
for Item in BootMode.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item, 'HelpText'))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
Str = "SupportedBootModes = %s %s" % (self.SupportedBootModes, self.CommonDefines)
for Item in self.HelpText:
Str = Str + '\n\t' + str(Item)
return Str
##
# EventXml
#
class EventXml(object):
def __init__(self):
self.EventType = ''
self.Name = ''
self.CommonDefines = CommonDefinesXml()
self.HelpText = []
def FromXml(self, Item, Key):
self.EventType = XmlAttribute(XmlNode(Item, '%s' % Key), 'EventType')
self.Name = XmlElement(Item, '%s' % Key)
self.CommonDefines.FromXml(Item, Key)
for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
HelpTextObj = HelpTextXml()
HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
self.HelpText.append(HelpTextObj)
Event = EventObject()
Event.SetEventType(self.EventType)
Event.SetUsage(self.CommonDefines.Usage)
Event.SetHelpTextList(GetHelpTextList(self.HelpText))
return Event
def ToXml(self, Event, Key):
if self.HelpText:
pass
AttributeList = [['EventType', Event.GetEventType()],
['Usage', Event.GetUsage()],
]
NodeList = []
for Item in Event.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item, 'HelpText'))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
Str = "EventType = %s %s" % (self.EventType, self.CommonDefines)
for Item in self.HelpText:
Str = Str + '\n\t' + str(Item)
return Str
##
# HobXml
#
class HobXml(object):
def __init__(self):
self.HobType = ''
self.Name = ''
self.CommonDefines = CommonDefinesXml()
self.HelpText = []
def FromXml(self, Item, Key):
self.HobType = XmlAttribute(XmlNode(Item, '%s' % Key), 'HobType')
self.Name = XmlElement(Item, '%s' % Key)
self.CommonDefines.FromXml(Item, Key)
for HelpTextItem in XmlList(Item, '%s/HelpText' % Key):
HelpTextObj = HelpTextXml()
HelpTextObj.FromXml(HelpTextItem, '%s/HelpText' % Key)
self.HelpText.append(HelpTextObj)
Hob = HobObject()
Hob.SetHobType(self.HobType)
Hob.SetSupArchList(self.CommonDefines.SupArchList)
Hob.SetUsage(self.CommonDefines.Usage)
Hob.SetHelpTextList(GetHelpTextList(self.HelpText))
return Hob
def ToXml(self, Hob, Key):
if self.Name:
pass
AttributeList = [['HobType', Hob.GetHobType()],
['Usage', Hob.GetUsage()],
['SupArchList', GetStringOfList(Hob.GetSupArchList())], ]
NodeList = []
for Item in Hob.GetHelpTextList():
Tmp = HelpTextXml()
NodeList.append(Tmp.ToXml(Item, 'HelpText'))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
Str = "HobType = %s %s" % (self.HobType, self.CommonDefines)
for Item in self.HelpText:
Str = Str + '\n\t' + str(Item)
return Str
##
# SourceFileXml
#
class SourceFileXml(object):
def __init__(self):
self.SourceFile = ''
self.ToolChainFamily = ''
self.FileType = ''
self.CommonDefines = CommonDefinesXml()
def FromXml(self, Item, Key):
self.ToolChainFamily = XmlAttribute(Item, 'Family')
self.SourceFile = XmlElement(Item, 'Filename')
self.CommonDefines.FromXml(Item, Key)
self.CommonDefines.FeatureFlag = ConvertNOTEQToNE(self.CommonDefines.FeatureFlag)
SourceFile = SourceFileObject()
SourceFile.SetSourceFile(self.SourceFile)
SourceFile.SetFamily(self.ToolChainFamily)
SourceFile.SetSupArchList(self.CommonDefines.SupArchList)
SourceFile.SetFeatureFlag(self.CommonDefines.FeatureFlag)
return SourceFile
def ToXml(self, SourceFile, Key):
if self.SourceFile:
pass
FeatureFlag = ConvertNEToNOTEQ(SourceFile.GetFeatureFlag())
AttributeList = [['SupArchList', GetStringOfList(SourceFile.GetSupArchList())],
['Family', SourceFile.GetFamily()],
['FeatureFlag', FeatureFlag], ]
Root = CreateXmlElement('%s' % Key, SourceFile.GetSourceFile(), [], AttributeList)
return Root
##
# ModulePropertyXml
#
class ModulePropertyXml(object):
def __init__(self):
self.CommonDefines = CommonDefinesXml()
self.ModuleType = ''
self.Path = ''
self.PcdIsDriver = ''
self.UefiSpecificationVersion = ''
self.PiSpecificationVersion = ''
self.SpecificationList = []
self.SpecificationVersion = ''
self.BootModes = []
self.Events = []
self.HOBs = []
def FromXml(self, Item, Key, Header=None):
self.CommonDefines.FromXml(Item, Key)
self.ModuleType = XmlElement(Item, '%s/ModuleType' % Key)
self.Path = XmlElement(Item, '%s/Path' % Key)
self.PcdIsDriver = XmlElement(Item, '%s/PcdIsDriver' % Key)
self.UefiSpecificationVersion = XmlElement(Item, '%s/UefiSpecificationVersion' % Key)
self.PiSpecificationVersion = XmlElement(Item, '%s/PiSpecificationVersion' % Key)
for SubItem in XmlList(Item, '%s/Specification' % Key):
Specification = XmlElement(SubItem, '/Specification')
Version = XmlAttribute(XmlNode(SubItem, '/Specification'), 'Version')
self.SpecificationList.append((Specification, Version))
for SubItem in XmlList(Item, '%s/BootMode' % Key):
Axml = BootModeXml()
BootMode = Axml.FromXml(SubItem, 'BootMode')
self.BootModes.append(BootMode)
for SubItem in XmlList(Item, '%s/Event' % Key):
Axml = EventXml()
Event = Axml.FromXml(SubItem, 'Event')
self.Events.append(Event)
for SubItem in XmlList(Item, '%s/HOB' % Key):
Axml = HobXml()
Hob = Axml.FromXml(SubItem, 'HOB')
self.HOBs.append(Hob)
        if Header is None:
Header = ModuleObject()
Header.SetModuleType(self.ModuleType)
Header.SetSupArchList(self.CommonDefines.SupArchList)
Header.SetModulePath(self.Path)
Header.SetPcdIsDriver(self.PcdIsDriver)
Header.SetUefiSpecificationVersion(self.UefiSpecificationVersion)
Header.SetPiSpecificationVersion(self.PiSpecificationVersion)
Header.SetSpecList(self.SpecificationList)
return Header, self.BootModes, self.Events, self.HOBs
def ToXml(self, Header, BootModes, Events, Hobs, Key):
if self.ModuleType:
pass
AttributeList = [['SupArchList', GetStringOfList(Header.GetSupArchList())], ]
NodeList = [['ModuleType', Header.GetModuleType()],
['Path', Header.GetModulePath()],
['PcdIsDriver', Header.GetPcdIsDriver()],
['UefiSpecificationVersion', Header.GetUefiSpecificationVersion()],
['PiSpecificationVersion', Header.GetPiSpecificationVersion()],
]
for Item in Header.GetSpecList():
Spec, Version = Item
SpecElem = CreateXmlElement('Specification', Spec, [], [['Version', Version]])
NodeList.append(SpecElem)
for Item in BootModes:
Tmp = BootModeXml()
NodeList.append(Tmp.ToXml(Item, 'BootMode'))
for Item in Events:
Tmp = EventXml()
NodeList.append(Tmp.ToXml(Item, 'Event'))
for Item in Hobs:
Tmp = HobXml()
NodeList.append(Tmp.ToXml(Item, 'HOB'))
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
def __str__(self):
Str = "ModuleType = %s Path = %s PcdIsDriver = %s UefiSpecificationVersion = %s PiSpecificationVersion = %s \
Specification = %s SpecificationVersion = %s %s" % \
(self.ModuleType, self.Path, self.PcdIsDriver, \
self.UefiSpecificationVersion, self.PiSpecificationVersion, \
self.SpecificationList, self.SpecificationVersion, self.CommonDefines)
for Item in self.BootModes:
Str = Str + '\n\t' + str(Item)
for Item in self.Events:
Str = Str + '\n\t' + str(Item)
for Item in self.HOBs:
Str = Str + '\n\t' + str(Item)
return Str
##
# ModuleXml
#
class ModuleSurfaceAreaXml(object):
def __init__(self, Package=''):
self.Module = None
#
# indicate the package that this module resides in
#
self.Package = Package
def FromXml2(self, Item, Module):
if self.Module:
pass
#
# PeiDepex
#
PeiDepexList = []
for SubItem in XmlList(Item, '/ModuleSurfaceArea/PeiDepex'):
Tmp = DepexXml()
Depex = Tmp.FromXml(XmlNode(SubItem, 'PeiDepex'), 'PeiDepex')
PeiDepexList.append(Depex)
Module.SetPeiDepex(PeiDepexList)
#
# DxeDepex
#
DxeDepexList = []
for SubItem in XmlList(Item, '/ModuleSurfaceArea/DxeDepex'):
Tmp = DepexXml()
Depex = Tmp.FromXml(XmlNode(SubItem, 'DxeDepex'), 'DxeDepex')
DxeDepexList.append(Depex)
Module.SetDxeDepex(DxeDepexList)
#
# SmmDepex
#
SmmDepexList = []
for SubItem in XmlList(Item, '/ModuleSurfaceArea/SmmDepex'):
Tmp = DepexXml()
Depex = Tmp.FromXml(XmlNode(SubItem, 'SmmDepex'), 'SmmDepex')
SmmDepexList.append(Depex)
Module.SetSmmDepex(SmmDepexList)
#
# MiscellaneousFile
Tmp = MiscellaneousFileXml()
MiscFileList = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/MiscellaneousFiles'), 'MiscellaneousFiles')
if MiscFileList:
Module.SetMiscFileList([MiscFileList])
else:
Module.SetMiscFileList([])
#
# UserExtensions
#
        for SubItem in XmlList(Item, '/ModuleSurfaceArea/UserExtensions'):
            Tmp = UserExtensionsXml()
            UserExtension = Tmp.FromXml(SubItem, 'UserExtensions')
Module.SetUserExtensionList(Module.GetUserExtensionList() + [UserExtension])
return Module
def FromXml(self, Item, Key, IsStandAlongModule=False):
IsBinaryModule = XmlAttribute(Item, 'BinaryModule')
#
# Header
#
Tmp = HeaderXml()
Module = Tmp.FromXml(XmlNode(Item, '/%s/Header' % Key), 'Header', True, IsStandAlongModule)
Module.SetBinaryModule(IsBinaryModule)
if IsBinaryModule:
GlobalData.gIS_BINARY_INF = True
#
# ModuleProperties
#
Tmp = ModulePropertyXml()
(Module, BootModes, Events, HOBs) = \
Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/ModuleProperties'), 'ModuleProperties', Module)
Module.SetBootModeList(BootModes)
Module.SetEventList(Events)
Module.SetHobList(HOBs)
#
# ClonedFrom
#
Tmp = ClonedFromXml()
ClonedFrom = Tmp.FromXml(XmlNode(Item, '/ModuleSurfaceArea/ClonedFrom'), 'ClonedFrom')
if ClonedFrom:
Module.SetClonedFrom(ClonedFrom)
#
# LibraryClass
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/LibraryClassDefinitions/LibraryClass'):
Tmp = LibraryClassXml()
LibraryClass = Tmp.FromXml(SubItem, 'LibraryClass')
Module.SetLibraryClassList(Module.GetLibraryClassList() + [LibraryClass])
if XmlList(Item, '/ModuleSurfaceArea/LibraryClassDefinitions') and \
not XmlList(Item, '/ModuleSurfaceArea/LibraryClassDefinitions/LibraryClass'):
Module.SetLibraryClassList([None])
#
# SourceFiles
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/SourceFiles/Filename'):
Tmp = SourceFileXml()
SourceFile = Tmp.FromXml(SubItem, 'Filename')
Module.SetSourceFileList(Module.GetSourceFileList() + [SourceFile])
if XmlList(Item, '/ModuleSurfaceArea/SourceFiles') and \
not XmlList(Item, '/ModuleSurfaceArea/SourceFiles/Filename') :
Module.SetSourceFileList([None])
#
# BinaryFile
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/BinaryFiles/BinaryFile'):
Tmp = BinaryFileXml()
BinaryFile = Tmp.FromXml(SubItem, 'BinaryFile')
Module.SetBinaryFileList(Module.GetBinaryFileList() + [BinaryFile])
if XmlList(Item, '/ModuleSurfaceArea/BinaryFiles') and \
not XmlList(Item, '/ModuleSurfaceArea/BinaryFiles/BinaryFile') :
Module.SetBinaryFileList([None])
#
# PackageDependencies
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/PackageDependencies/Package'):
Tmp = PackageXml()
PackageDependency = Tmp.FromXml(SubItem, 'Package')
Module.SetPackageDependencyList(Module.GetPackageDependencyList() + [PackageDependency])
if XmlList(Item, '/ModuleSurfaceArea/PackageDependencies') and \
not XmlList(Item, '/ModuleSurfaceArea/PackageDependencies/Package'):
Module.SetPackageDependencyList([None])
#
# Guid
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/Guids/GuidCName'):
Tmp = GuidXml('Module')
GuidProtocolPpi = Tmp.FromXml(SubItem, 'GuidCName')
Module.SetGuidList(Module.GetGuidList() + [GuidProtocolPpi])
if XmlList(Item, '/ModuleSurfaceArea/Guids') and not XmlList(Item, '/ModuleSurfaceArea/Guids/GuidCName'):
Module.SetGuidList([None])
#
# Protocol
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/Protocols/Protocol'):
Tmp = ProtocolXml('Module')
GuidProtocolPpi = Tmp.FromXml(SubItem, 'Protocol')
Module.SetProtocolList(Module.GetProtocolList() + [GuidProtocolPpi])
if XmlList(Item, '/ModuleSurfaceArea/Protocols') and not XmlList(Item, '/ModuleSurfaceArea/Protocols/Protocol'):
Module.SetProtocolList([None])
#
# Ppi
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/PPIs/Ppi'):
Tmp = PpiXml('Module')
GuidProtocolPpi = Tmp.FromXml(SubItem, 'Ppi')
Module.SetPpiList(Module.GetPpiList() + [GuidProtocolPpi])
if XmlList(Item, '/ModuleSurfaceArea/PPIs') and not XmlList(Item, '/ModuleSurfaceArea/PPIs/Ppi'):
Module.SetPpiList([None])
#
# Extern
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/Externs/Extern'):
Tmp = ExternXml()
Extern = Tmp.FromXml(SubItem, 'Extern')
Module.SetExternList(Module.GetExternList() + [Extern])
if XmlList(Item, '/ModuleSurfaceArea/Externs') and not XmlList(Item, '/ModuleSurfaceArea/Externs/Extern'):
Module.SetExternList([None])
if not Module.GetBinaryModule():
#
# PcdCoded
#
for SubItem in XmlList(Item, '/ModuleSurfaceArea/PcdCoded/PcdEntry'):
Tmp = PcdEntryXml()
PcdEntry = Tmp.FromXml3(SubItem, 'PcdEntry')
Module.SetPcdList(Module.GetPcdList() + [PcdEntry])
if XmlList(Item, '/ModuleSurfaceArea/PcdCoded') and \
not XmlList(Item, '/ModuleSurfaceArea/PcdCoded/PcdEntry'):
Module.SetPcdList([None])
Module = self.FromXml2(Item, Module)
#
# return the module object
#
self.Module = Module
return self.Module
def ToXml(self, Module):
if self.Package:
pass
#
# Create root node of module surface area
#
DomModule = minidom.Document().createElement('ModuleSurfaceArea')
if Module.GetBinaryModule():
DomModule.setAttribute('BinaryModule', 'true')
#
# Header
#
Tmp = HeaderXml()
DomModule.appendChild(Tmp.ToXml(Module, 'Header'))
#
# ModuleProperties
#
Tmp = ModulePropertyXml()
DomModule.appendChild(Tmp.ToXml(Module, Module.GetBootModeList(), Module.GetEventList(), Module.GetHobList(), \
'ModuleProperties'))
#
# ClonedFrom
#
Tmp = ClonedFromXml()
if Module.GetClonedFrom():
DomModule.appendChild(Tmp.ToXml(Module.GetClonedFrom(), 'ClonedFrom'))
#
# LibraryClass
#
LibraryClassNode = CreateXmlElement('LibraryClassDefinitions', '', [], [])
for LibraryClass in Module.GetLibraryClassList():
Tmp = LibraryClassXml()
LibraryClassNode.appendChild(Tmp.ToXml2(LibraryClass, 'LibraryClass'))
DomModule.appendChild(LibraryClassNode)
#
# SourceFile
#
SourceFileNode = CreateXmlElement('SourceFiles', '', [], [])
for SourceFile in Module.GetSourceFileList():
Tmp = SourceFileXml()
SourceFileNode.appendChild(Tmp.ToXml(SourceFile, 'Filename'))
DomModule.appendChild(SourceFileNode)
#
# BinaryFile
#
BinaryFileNode = CreateXmlElement('BinaryFiles', '', [], [])
for BinaryFile in Module.GetBinaryFileList():
Tmp = BinaryFileXml()
BinaryFileNode.appendChild(Tmp.ToXml(BinaryFile, 'BinaryFile'))
DomModule.appendChild(BinaryFileNode)
#
# PackageDependencies
#
PackageDependencyNode = CreateXmlElement('PackageDependencies', '', [], [])
for PackageDependency in Module.GetPackageDependencyList():
Tmp = PackageXml()
PackageDependencyNode.appendChild(Tmp.ToXml(PackageDependency, 'Package'))
DomModule.appendChild(PackageDependencyNode)
#
# Guid
#
GuidProtocolPpiNode = CreateXmlElement('Guids', '', [], [])
for GuidProtocolPpi in Module.GetGuidList():
Tmp = GuidXml('Module')
GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'GuidCName'))
DomModule.appendChild(GuidProtocolPpiNode)
#
# Protocol
#
GuidProtocolPpiNode = CreateXmlElement('Protocols', '', [], [])
for GuidProtocolPpi in Module.GetProtocolList():
Tmp = ProtocolXml('Module')
GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'Protocol'))
DomModule.appendChild(GuidProtocolPpiNode)
#
# Ppi
#
GuidProtocolPpiNode = CreateXmlElement('PPIs', '', [], [])
for GuidProtocolPpi in Module.GetPpiList():
Tmp = PpiXml('Module')
GuidProtocolPpiNode.appendChild(Tmp.ToXml(GuidProtocolPpi, 'Ppi'))
DomModule.appendChild(GuidProtocolPpiNode)
#
# Extern
#
ExternNode = CreateXmlElement('Externs', '', [], [])
for Extern in Module.GetExternList():
Tmp = ExternXml()
ExternNode.appendChild(Tmp.ToXml(Extern, 'Extern'))
DomModule.appendChild(ExternNode)
#
# PcdCoded
#
PcdEntryNode = CreateXmlElement('PcdCoded', '', [], [])
for PcdEntry in Module.GetPcdList():
Tmp = PcdEntryXml()
PcdEntryNode.appendChild(Tmp.ToXml3(PcdEntry, 'PcdEntry'))
DomModule.appendChild(PcdEntryNode)
#
# PeiDepex
#
if Module.GetPeiDepex():
for Item in Module.GetPeiDepex():
Tmp = DepexXml()
DomModule.appendChild(Tmp.ToXml(Item, 'PeiDepex'))
#
# DxeDepex
#
if Module.GetDxeDepex():
for Item in Module.GetDxeDepex():
Tmp = DepexXml()
DomModule.appendChild(Tmp.ToXml(Item, 'DxeDepex'))
#
# SmmDepex
#
if Module.GetSmmDepex():
for Item in Module.GetSmmDepex():
Tmp = DepexXml()
DomModule.appendChild(Tmp.ToXml(Item, 'SmmDepex'))
#
# MiscellaneousFile
#
if Module.GetMiscFileList():
Tmp = MiscellaneousFileXml()
DomModule.appendChild(Tmp.ToXml(Module.GetMiscFileList()[0], 'MiscellaneousFiles'))
#
# UserExtensions
#
if Module.GetUserExtensionList():
for UserExtension in Module.GetUserExtensionList():
Tmp = UserExtensionsXml()
DomModule.appendChild(Tmp.ToXml(UserExtension, 'UserExtensions'))
return DomModule
##
# BuildFlagXml used to generate BuildFlag for <AsBuilt>
#
class BuildFlagXml(object):
def __init__(self):
self.Target = ''
self.TagName = ''
self.Family = ''
self.AsBuiltFlags = ''
def FromXml(self, Item, Key):
self.Target = XmlElement(Item, '%s/Target' % Key)
self.TagName = XmlElement(Item, '%s/TagName' % Key)
self.Family = XmlElement(Item, '%s/Family' % Key)
BuildFlag = BinaryBuildFlagObject()
BuildFlag.SetTarget(self.Target)
BuildFlag.SetTagName(self.TagName)
BuildFlag.SetFamily(self.Family)
return BuildFlag
#
# For AsBuild INF usage
#
def FromXml2(self, Item, Key):
self.AsBuiltFlags = XmlElement(Item, '%s' % Key)
LineList = GetSplitValueList(self.AsBuiltFlags, '\n')
ReturnLine = ''
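        # prefix every flag line with '# ' so the block can be emitted
        # verbatim as comment lines in an as-built INF file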
Count = 0
for Line in LineList:
if Count == 0:
ReturnLine = "# " + Line
else:
ReturnLine = ReturnLine + '\n' + '# ' + Line
Count += 1
BuildFlag = BinaryBuildFlagObject()
BuildFlag.SetAsBuiltOptionFlags(ReturnLine)
return BuildFlag
def ToXml(self, BuildFlag, Key):
if self.Target:
pass
AttributeList = []
NodeList = []
NodeList.append(['BuildFlags', BuildFlag])
Root = CreateXmlElement('%s' % Key, '', NodeList, AttributeList)
return Root
|
gpl-2.0
| -4,743,107,407,899,620,000
| 35.681863
| 120
| 0.607317
| false
|
google/starthinker
|
dags/test_dag.py
|
1
|
4439
|
###########################################################################
#
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
'''
--------------------------------------------------------------
Before running this Airflow module...
Install StarThinker in cloud composer ( recommended ):
From Release: pip install starthinker
From Open Source: pip install git+https://github.com/google/starthinker
Or push local code to the cloud composer plugins directory ( if pushing local code changes ):
source install/deploy.sh
4) Composer Menu
l) Install All
--------------------------------------------------------------
If any recipe task has "auth" set to "user" add user credentials:
    1. Ensure a RECIPE['setup']['auth']['user'] = [User Credentials JSON]
OR
1. Visit Airflow UI > Admin > Connections.
2. Add an Entry called "starthinker_user", fill in the following fields. Last step paste JSON from authentication.
- Conn Type: Google Cloud Platform
- Project: Get from https://github.com/google/starthinker/blob/master/tutorials/cloud_project.md
- Keyfile JSON: Get from: https://github.com/google/starthinker/blob/master/tutorials/deploy_commandline.md#optional-setup-user-credentials
--------------------------------------------------------------
If any recipe task has "auth" set to "service" add service credentials:
    1. Ensure a RECIPE['setup']['auth']['service'] = [Service Credentials JSON]
OR
1. Visit Airflow UI > Admin > Connections.
2. Add an Entry called "starthinker_service", fill in the following fields. Last step paste JSON from authentication.
- Conn Type: Google Cloud Platform
- Project: Get from https://github.com/google/starthinker/blob/master/tutorials/cloud_project.md
- Keyfile JSON: Get from: https://github.com/google/starthinker/blob/master/tutorials/cloud_service.md
--------------------------------------------------------------
Test Script
Used by tests.
  - This should be called by the test scripts only.
  - When run, it will generate a say-hello log.
--------------------------------------------------------------
This StarThinker DAG can be extended with any additional tasks from the following sources:
- https://google.github.io/starthinker/
- https://github.com/google/starthinker/tree/master/dags
'''
from starthinker.airflow.factory import DAG_Factory
INPUTS = {}
RECIPE = {
'setup': {
'day': [
'Mon',
'Tue',
'Wed',
'Thu',
'Fri',
'Sat',
'Sun'
],
'hour': [
1,
3,
23
]
},
'tasks': [
{
'hello': {
'auth': 'user',
'hour': [
1
],
'say': 'Hello At 1',
'sleep': 0
}
},
{
'hello': {
'auth': 'user',
'hour': [
3
],
'say': 'Hello At 3',
'sleep': 0
}
},
{
'hello': {
'auth': 'user',
'hour': [
],
'say': 'Hello Manual',
'sleep': 0
}
},
{
'hello': {
'auth': 'user',
'hour': [
23
],
'say': 'Hello At 23 Sleep',
'sleep': 30
}
},
{
'hello': {
'auth': 'user',
'say': 'Hello At Anytime',
'sleep': 0
}
},
{
'hello': {
'auth': 'user',
'hour': [
1,
3,
23
],
'say': 'Hello At 1, 3, 23',
'sleep': 0
}
},
{
'hello': {
'auth': 'user',
'hour': [
3
],
'say': 'Hello At 3 Reordered',
'sleep': 0
}
}
]
}
dag_maker = DAG_Factory('test', RECIPE, INPUTS)
dag = dag_maker.generate()
if __name__ == "__main__":
dag_maker.print_commandline()
|
apache-2.0
| -252,957,889,067,501,380
| 24.079096
| 145
| 0.514305
| false
|
bonzini/rube-gmail
|
py/gmail-filters.py
|
1
|
11406
|
# gmail-filters.py
#
# Uses the GMail API to remotely manipulate filters for
# mailing lists and the corresponding labels.
#
# Author: Paolo Bonzini <pbonzini@redhat.com>
# License: AGPLv3
from __future__ import print_function
import httplib2
import os, sys, io
import argparse, copy
import mailbox
try:
from googleapiclient import discovery
from googleapiclient.http import MediaIoBaseUpload
import oauth2client.tools
import oauth2client.file
import oauth2client.client
except:
print("""Please install googleapiclient:
pip install --upgrade google-api-python-client
""", file=sys.stderr)
sys.exit(1)
def get_credentials(client_secret_file, credentials_file, scopes, user_agent, args=None):
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
store = oauth2client.file.Storage(credentials_file)
credentials = store.get()
if not credentials or credentials.invalid:
flow = oauth2client.client.flow_from_clientsecrets(client_secret_file, scopes)
flow.user_agent = user_agent
if args:
credentials = oauth2client.tools.run_flow(flow, store, args)
else: # Needed only for compatibility with Python 2.6
credentials = oauth2client.tools.run(flow, store)
print('Storing credentials to ' + credentials_file)
return credentials
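# Illustrative usage of get_credentials (a sketch; 'client_secret.json' is a
# hypothetical path, not shipped with this script):
#
#     creds = get_credentials('client_secret.json', CREDENTIALS_FILE,
#                             SCOPES, APPLICATION_NAME)
#     http = creds.authorize(httplib2.Http())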
# If modifying these scopes, delete your previously saved credentials
# at ~/.credentials/gmail-python-import.json
SCOPES = ['https://mail.google.com/', 'https://www.googleapis.com/auth/gmail.settings.basic']
HOME = os.path.expanduser('~')
CREDENTIALS = os.path.join(HOME, '.credentials')
CREDENTIALS_FILE = os.path.join(CREDENTIALS, 'gmail-python-filters.json')
APPLICATION_NAME = 'GMail Import'
class AppendAllAction(argparse.Action):
def __init__(self, option_strings, dest, nargs=None, default=[], **kwargs):
if nargs is None:
nargs = '+'
if nargs != '+' and nargs != '*':
raise ValueError("nargs must be + or *")
super(AppendAllAction, self).__init__(option_strings, dest,
default=copy.copy(default),
nargs=nargs, **kwargs)
def __call__(self, parser, namespace, values, option_string=None):
items = getattr(namespace, self.dest, None)
if items is None:
items = []
setattr(namespace, self.dest, items)
for value in values:
items.append(value)
class StoreOnceAction(argparse.Action):
def __init__(self, option_strings, dest, nargs=None, **kwargs):
if nargs is not None:
raise ValueError("nargs not allowed")
self.found = False
super(StoreOnceAction, self).__init__(option_strings, dest,
nargs=None, **kwargs)
def __call__(self, parser, namespace, values, option_string):
if self.found:
raise ValueError("cannot repeat " + option_string)
self.found = True
        setattr(namespace, self.dest, values)
def main():
parser = argparse.ArgumentParser(
description='Manipulate labels and filters of a GMail account',
formatter_class=argparse.RawDescriptionHelpFormatter,
parents=[oauth2client.tools.argparser],
epilog="""Specifying the same label in both --create_labels and --delete_labels
will remove the label from all messages.
To retrieve the client secrets file for --json, follow the instructions at
https://developers.google.com/gmail/api/quickstart/python.""")
parser.add_argument('--json', required=True,
help='Path to the client secrets file from https://console.developers.google.com')
parser.add_argument('--dry_run', action='store_true', default=False,
help='Do not actually do anything')
parser.add_argument('--create_labels', action=AppendAllAction, nargs='+',
help='Create the given labels', metavar='LABEL')
parser.add_argument('--hidden', action='store_true',
help='Hide the created labels from the label and message list')
parser.add_argument('--delete_labels', action=AppendAllAction, nargs='+',
help='Delete the given labels', metavar='LABEL')
parser.add_argument('--create_list_filter', action=StoreOnceAction,
help='Create a filter on the given list', metavar='LIST-ADDRESS')
parser.add_argument('--delete_list_filters', action=AppendAllAction,
help='Delete all filters on the given list', metavar='LIST-ADDRESS')
parser.add_argument('--star', action='store_true', default=False,
help='Set STAR for messages matching the filter')
parser.add_argument('--skip_inbox', action='store_true', default=False,
help='Unset INBOX label for messages matching the filter')
parser.add_argument('--never_spam', action='store_true', default=False,
help='Never send messages matching the filter to spam')
parser.add_argument('--add_labels', action=AppendAllAction, nargs='+',
help='Set given labels for messages matching the filter', metavar='LABEL')
parser.add_argument('--num_retries', default=10, type=int,
help='Maximum number of exponential backoff retries for failures (default: 10)')
# Validate argument combinations.
args = parser.parse_args()
if len(args.create_labels) + len(args.delete_labels) + \
len(args.delete_list_filters) + \
(args.create_list_filter is not None) == 0:
print('No action specified.', file=sys.stderr)
sys.exit(1)
if (len(args.create_labels) + len(args.delete_labels) + len(args.delete_list_filters) > 0) and \
(args.create_list_filter is not None):
print('--create_list_filter cannot be combined with other actions.', file=sys.stderr)
sys.exit(1)
if (args.create_list_filter is None) and \
(args.star + args.skip_inbox + args.never_spam + len(args.add_labels) > 0):
        print('--star, --skip_inbox, --never_spam and --add_labels can only be combined with --create_list_filter.', file=sys.stderr)
        sys.exit(1)
# Authenticate and get root service object
if not os.path.exists(CREDENTIALS):
os.makedirs(CREDENTIALS)
credentials = get_credentials(args.json, CREDENTIALS_FILE, SCOPES, APPLICATION_NAME, args)
http = credentials.authorize(httplib2.Http())
service = discovery.build('gmail', 'v1', http=http)
# if we will have to convert label names to ids, make a map
labelsByName = {}
if len(args.delete_labels) or len(args.add_labels):
results = service.users().labels().\
list(userId='me').\
execute(num_retries=args.num_retries)
labels = results.get('labels', [])
labelsByName = {}
for label in labels:
labelsByName[label['name']] = label['id']
# --add_labels implies creating the missing labels
for i in args.add_labels:
if not (i in labelsByName):
args.create_labels.append(i)
if len(args.create_labels) == 0 and args.hidden:
print('--hidden specified but no labels would be created.', file=sys.stderr)
sys.exit(1)
# Now execute the commands
did_something = False
if len(args.delete_labels):
for i in args.delete_labels:
if (i in labelsByName):
if not args.dry_run:
print("Deleting label " + i + "...")
service.users().labels().\
delete(userId='me', id=labelsByName[i]).\
execute(num_retries=args.num_retries)
did_something = True
else:
print("Would delete label " + i + ".")
del labelsByName[i]
else:
print("Label %s does not exist." % i)
if len(args.create_labels):
for i in args.create_labels:
if (i in labelsByName):
print("Label %s already exists." % i)
else:
if not args.dry_run:
print("Creating label " + i + "...")
body = {'name': i}
if args.hidden:
body['messageListVisibility'] = 'hide'
body['labelListVisibility'] = 'labelHide'
label = service.users().labels().\
create(userId='me', body=body).\
execute(num_retries=args.num_retries)
                    did_something = True
                    labelsByName[i] = label['id']
                else:
                    print("Would create label " + i + ".")
if len(args.delete_list_filters):
results = service.users().settings().filters().\
list(userId='me').\
execute(num_retries=args.num_retries)
filters = results.get('filter', [])
for listid in args.delete_list_filters:
deleted = False
for filt in filters:
if ('query' in filt['criteria']) and \
filt['criteria']['query'] == ('list:' + listid):
if not args.dry_run:
print ("Deleting filter " + filt['id'] + " for list " + listid + "...")
service.users().settings().filters().\
delete(userId='me', id=filt['id']).\
execute(num_retries=args.num_retries)
did_something = True
else:
print ("Would delete filter " + filt['id'] + " for list " + listid + ".")
deleted = True
break
if not deleted:
print("No filter exists for list " + listid, file=sys.stderr)
if args.create_list_filter is not None:
if not args.dry_run:
print("Creating filter on list:" + args.create_list_filter + "...")
addLabelIds = [labelsByName[i] for i in args.add_labels]
if args.star:
addLabelIds.append('STARRED')
removeLabelIds = []
if args.skip_inbox:
removeLabelIds.append('INBOX')
if args.never_spam:
removeLabelIds.append('SPAM')
body = {'criteria': { 'query': 'list:' + args.create_list_filter },
'action': {
'addLabelIds': addLabelIds,
'removeLabelIds': removeLabelIds
}
}
service.users().settings().filters().\
create(userId='me', body=body).\
execute(num_retries=args.num_retries)
did_something = True
else:
print("Would create filter on list:" + args.create_list_filter + ".")
if did_something:
print("Completed!")
if __name__ == '__main__':
main()
|
agpl-3.0
| -3,171,581,649,152,842,000
| 43.905512
| 133
| 0.576889
| false
|
choard1895/NaNoGenMo2016
|
Source/DEPRECATED_base.py
|
1
|
1888
|
class PolymorphicNoun(object):
# TODO: Doc
def __init__(self, singular, plural):
self.singular = singular
self.plural = plural
def decline(self, quantity=1):
# TODO: Doc
if quantity == 1:
return self.singular
elif (quantity > 1) or (quantity == 0):
return self.plural
else:
raise ValueError('Cannot decline negative quantity.')
def identify(self, definite=False, quantity=1, specific=False):
# TODO: Doc
if quantity < 0:
raise ValueError('Quantity must not be negative')
if definite:
if quantity == 1:
return 'the ' + self.singular
else:
if specific:
return 'the ' + str(quantity) + ' ' + self.plural
else:
return 'the ' + self.plural
else:
if quantity == 1:
article = 'an' if self.singular[0] in 'aeiou' else 'a'
return article + ' ' + self.singular
else:
if specific:
return str(quantity) + ' ' + self.plural
else:
return 'some ' + self.plural
thing = PolymorphicNoun('thing', 'things')
assert thing.decline(1) == 'thing'
assert thing.decline(0) == 'things'
assert thing.decline(60) == 'things'
assert thing.identify() == 'a thing'
assert thing.identify(definite=False, quantity=1) == 'a thing'
assert thing.identify(definite=False, quantity=2, specific=False) == 'some things'
assert thing.identify(definite=False, quantity=2, specific=True) == '2 things'
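# A few extra checks for the definite-article cases, added for illustration;
# they follow directly from identify() above and the table below:
assert thing.identify(definite=True, quantity=1) == 'the thing'
assert thing.identify(definite=True, quantity=3, specific=False) == 'the things'
assert thing.identify(definite=True, quantity=3, specific=True) == 'the 3 things'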
'''
thing
- thing[s|...] (decline)
- a(n) thing (d=F, q=1, s=X)
- some things (d=F, q=+, s=F)
- {count} things (d=F, q=+, s=T)
- the thing (d=T, q=1, s=X)
- the thing[s|...] (d=T, q=+, s=F)
- the {count} thing[s|...] (d=T, q=+, s=T)
'''
|
mit
| -3,788,478,308,745,574,400
| 30.466667
| 82
| 0.534958
| false
|
jaalto/httping
|
plot-json.py
|
2
|
1581
|
#! /usr/bin/python
import sys
import json
import os
import math
fin = sys.argv[1]
print "Loading from %s" % (fin)
fh = open(fin)
json_data = json.load(fh)
print "Number of rows: %d" % (len(json_data))
fdata = fin + ".dat"
print "Writing data to %s" % (fdata)
data_fh = open(fdata, "w")
host='?'
total=0
total_sd=0
n=0
avg=0
sd=0
minp = 999999999
maxp = -minp
for row in json_data:
if row['status'] == '1':
val = float(row['total_s'])
data_fh.write("%f %f\n" % (float(row['start_ts']), val))
host=row['host']
total += val
total_sd += val * val
n += 1
if val > maxp:
maxp = val
if val < minp:
minp = val
print "Rows ignored: %d" % (len(json_data) - n)
data_fh.close()
if n > 0:
avg = total / n
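    # population standard deviation, via the identity Var[X] = E[X^2] - E[X]^2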
sd = math.sqrt((total_sd / n) - math.pow(avg, 2.0))
print "Average ping time: %fs (%d pings)" % (avg, n)
print "Standard deviation: %fs" % (sd)
print "Minimum ping value: %fs" % (minp)
print "Maximum ping value: %fs" % (maxp)
fscript = fin + ".sh"
print "Writing script to %s" % (fscript)
fpng = fin + ".png"
script_fh = open(fscript, "w")
script_fh.write("#! /bin/sh\n\n")
script_fh.write("gnuplot <<EOF > " + fpng + "\n")
script_fh.write("set term png size 800,600 tiny\n")
script_fh.write("set autoscale\n")
script_fh.write("set timefmt \"%s\"\n")
script_fh.write("set xdata time\n")
script_fh.write("set format x \"%H:%M:%S\"\n")
script_fh.write("plot \"" + fdata + "\" using 1:2 with lines title \"" + host + "\"\n")
script_fh.write("EOF\n")
os.chmod(fscript, 0755)
script_fh.close()
print "Now invoke %s to generate %s" % (fscript, fpng)
|
gpl-2.0
| 7,975,171,851,592,360,000
| 19.269231
| 87
| 0.611006
| false
|
Edgar-Bruno/Estudos_iniciais_Python
|
GTK/sudoku/OrdenadasVerificar.py
|
1
|
1348
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
# Checking for repeated values along the abscissa (x) axis
indexMatriz = 46 # Matrix index passed in from the click
i = indexMatriz
dicOrdenadas = {} # Key is the listaMatriz index, value is whether a repeat exists or not
listaMatriz = [6, 2, 4, 1, 5, 9, 7, 6, 8,
9, 7, 8, 2, 6, 3, 1, 9, 4,
5, 6, 2, 3, 8, 7, 4, 9, 1,
1, 5, 7, 8, 3, 4, 2, 6, 9,
4, 1, 1, 6, 7, 8, 5, 2, 3,
7, 8, 5, 9, 7, 2, 3, 7, 6,
3, 4, 9, 7, 2, 6, 8, 1, 5,
8, 3, 6, 5, 4, 1, 9, 7, 2,
2, 1, 3, 4, 9, 5, 6, 8, 7]
"""
[6, 2, 4, 1, 5, 9, 7, 6, 8,
9, 7, 8, 2, 6, 3, 1, 9, 4,
5, 6, 2, 3, 8, 7, 4, 9, 1,
1, 5, 7, 8, 3, 4, 2, 6, 9,
4, 9, 1, 6, 7, 8, 5, 2, 3,
7, 8, 5, 9, 7, 2, 3, 7, 6,
3, 4, 9, 7, 2, 6, 8, 1, 5,
8, 3, 6, 5, 4, 1, 9, 7, 2,
2, 1, 3, 4, 9, 5, 6, 8, 7]
"""
i = indexMatriz
while i >= 9:
i -= 9
listaOrdenadasTMP = listaMatriz[i::9] # list for the line being checked. Starting index = 0
print listaOrdenadasTMP
print "------------------------ "
for ii in range(9):
vaux = listaOrdenadasTMP[ii]
listaOrdenadasTMP[ii] = None
print listaOrdenadasTMP
if vaux in listaOrdenadasTMP:
dicOrdenadas[i] = True
else:
dicOrdenadas[i] = False
i += 9
    listaOrdenadasTMP[ii] = vaux # Put the original value back
print dicOrdenadas
dicValid = [True, True, True]
if False in dicValid:
print "AQUI!!!!! "
print dicValid
|
gpl-3.0
| -7,201,601,456,120,977,000
| 18.735294
| 84
| 0.545116
| false
|
alirizakeles/tendenci
|
tendenci/apps/profiles/management/commands/update_admin_group_perms.py
|
1
|
1507
|
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import Group as Auth_Group, Permission
from django.contrib.contenttypes.models import ContentType
from django.conf import settings
class Command(BaseCommand):
    # create an admin auth group if it doesn't exist, and assign all permissions
    # (except the 4 auth permissions: auth_user, auth_group, ...) to this group
# command to run: python manage.py update_admin_group_perms
def handle(self, *args, **options):
out = ''
if hasattr(settings, 'ADMIN_AUTH_GROUP_NAME'):
name = settings.ADMIN_AUTH_GROUP_NAME
else:
name = 'Admin'
try:
auth_group = Auth_Group.objects.get(name=name)
except Auth_Group.DoesNotExist:
auth_group = Auth_Group(name=name)
auth_group.save()
#self.stdout.write('Successfully created an auth group "Admin".')
out = 'Successfully created an auth group "Admin".\n'
# assign permission to group, but exclude the auth content
content_to_exclude = ContentType.objects.filter(app_label='auth')
permissions = Permission.objects.all().exclude(content_type__in=content_to_exclude)
auth_group.permissions = permissions
auth_group.save()
#self.stdout.write('Successfully added all permissions to group "Admin".')
out += 'Successfully added/updated all permissions to group "%s".' % name
print out
|
gpl-3.0
| 9,070,616,667,420,341,000
| 43.323529
| 91
| 0.668879
| false
|
adrianbeloqui/Python
|
nested_lists.py
|
1
|
1675
|
"""Given the names and grades for each student in a Physics class of N
students, store them in a nested list and print the name(s) of any
student(s) having the second lowest grade.
Note: If there are multiple students with the same grade, order their
names alphabetically and print each name on a new line.
Input Format
The first line contains an integer, N, the number of students.
The 2N subsequent lines describe each student over 2 lines; the first
line contains a student's name, and the second line contains their
grade.
Constraints
2 <= N <= 5
There will always be one or more students having the second lowest
grade.
Output Format
Print the name(s) of any student(s) having the second lowest grade
in Physics; if there are multiple students, order their names
alphabetically and print each one on a new line.
"""
from operator import itemgetter
def second_lowest(*args):
arr = args[0]
lowest, higher_lowest = arr[0], ["", 100]
for student in arr:
if student[1] < higher_lowest[1]:
if student[1] < lowest[1]:
higher_lowest, lowest = lowest, student
elif student[1] == lowest[1]:
continue
else:
higher_lowest = student
return higher_lowest[1]
if __name__ == '__main__':
students = []
for _ in range(int(input())):
name = input()
score = float(input())
students.append([name, score])
    second_lowest_grade = second_lowest(students)
    result_list = list(filter(lambda x: x[1] == second_lowest_grade, students))
result_list.sort(key=itemgetter(0))
for student in result_list:
print(student[0])
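# Illustrative run (the classic sample for this exercise): given five students
# Harry 37.21, Berry 37.21, Tina 37.2, Akriti 41 and Harsh 39, the lowest
# grade is 37.2 and the second lowest is 37.21, so the program prints "Berry"
# and "Harry" on separate lines, in alphabetical order.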
|
mit
| -7,068,788,371,389,833,000
| 29.472727
| 80
| 0.666269
| false
|
rolobio/sshm
|
sshm/main.py
|
1
|
5215
|
#! /usr/bin/env python3
"""
This module allows the console to use SSHM's functionality.
This module should only be run by the console!
"""
from __future__ import print_function
import sys
try: # pragma: no cover version specific
from lib import sshm
except ImportError: # pragma: no cover version specific
from sshm.lib import sshm
__all__ = ['main']
def get_argparse_args(args=None):
"""
Get the arguments passed to this script when it was run.
@param args: A list of arguments passed in the console.
@type args: list
@returns: A tuple containing (args, command, extra_args)
@rtype: tuple
"""
try: # pragma: no cover
from _info import __version__, __long_description__
except ImportError: # pragma: no cover
from sshm._info import __version__, __long_description__
import argparse
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=__long_description__)
parser.add_argument('servers', nargs='+')
parser.add_argument('command')
parser.add_argument('-s', '--sorted-output', action='store_true', default=False,
help='Sort the output by the URI of each instance. This will wait for all instances to finish before showing any output!')
parser.add_argument('-p', '--strip-whitespace', action='store_true', default=False,
help='Remove any whitespace surrounding the output of each instance.')
parser.add_argument('-d', '--disable-formatting', action='store_true', default=False,
help='Disable command formatting.')
parser.add_argument('-u', '--quiet', action='store_true', default=False,
help="Hide SSHM's server information on output (this implies sorted).")
parser.add_argument('-w', '--workers', type=int, default=20,
help="Limit the amount of concurrent SSH connections.")
parser.add_argument('--version', action='version', version='%(prog)s '+__version__)
args, extra_args = parser.parse_known_args(args=args)
# Move any servers that start with a - to extra_args
new_servers = []
for i in args.servers:
if i.startswith('-'):
extra_args.append(i)
else:
new_servers.append(i)
args.servers = new_servers
    # If the command starts with a -, replace it with the last server and
# move the command to extra_args.
if args.command.startswith('-'):
extra_args.append(args.command)
args.command = args.servers.pop(-1)
if args.quiet:
args.sorted_output = True
return (args, args.command, extra_args)
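# Illustrative example (a sketch, not exercised by this module): the command
# line "sshm host1 host2 'uptime'" yields args.servers == ['host1', 'host2']
# and command == 'uptime'; dash-prefixed tokens are collected into extra_args
# and passed through to ssh.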
def _print_handling_newlines(uri, return_code, to_print, header='', strip_whitespace=False, quiet=False, file=sys.stdout):
"""
Print "to_print" to "file" with the formatting needed to represent it's data
properly.
"""
if strip_whitespace:
to_print = to_print.strip()
if to_print.count('\n') == 0:
sep = ' '
else:
sep = '\n'
output_str = 'sshm: {header}{uri}({return_code}):{sep}{to_print}'
if quiet:
output_str = '{to_print}'
print(output_str.format(header=header,
uri=uri,
return_code=return_code,
sep=sep,
to_print=to_print), file=file)
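# Illustrative output (derived from output_str above): a single-line result
# prints as "sshm: user@host(0): ok", while multi-line output begins on the
# line after the "sshm: user@host(0):" header.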
def main():
"""
Run SSHM using console provided arguments.
This should only be run using a console!
"""
import select
args, command, extra_arguments = get_argparse_args()
    # Only provide stdin if there is data
    r_list, _, _ = select.select([sys.stdin], [], [], 0)
if r_list:
stdin = r_list[0]
else:
stdin = None
# Perform the command on each server, print the results to stdout.
results = sshm(args.servers, command, extra_arguments, stdin, args.disable_formatting, args.workers)
# If a sorted output is requested, gather all results before output.
if args.sorted_output:
results = list(results)
results = sorted(results, key=lambda x: x['uri'])
exit_code = 0
for result in results:
exit_code = exit_code or result.get('return_code')
        if result.get('stdout') is not None:
_print_handling_newlines(result['uri'],
result['return_code'],
result['stdout'],
strip_whitespace=args.strip_whitespace,
quiet=args.quiet,
)
if result.get('stderr'):
_print_handling_newlines(result['uri'],
result.get('return_code', ''),
result['stderr'],
'Error: ',
strip_whitespace=args.strip_whitespace,
quiet=args.quiet,
file=sys.stderr,
)
        if result.get('traceback'):
            _print_handling_newlines(result['uri'],
                                     result.get('return_code', ''),
                                     result['traceback'],
                                     'Traceback: ',
                                     strip_whitespace=args.strip_whitespace,
                                     quiet=args.quiet,
                                     file=sys.stderr,
                                     )
# Exit with non-zero when there is a failure
sys.exit(exit_code)
if __name__ == '__main__':
main()
|
gpl-2.0
| 6,128,735,719,583,437,000
| 33.536424
| 135
| 0.595781
| false
|
pombredanne/django-avocado
|
avocado/modeltree.py
|
1
|
22222
|
import inspect
from django.db import models
from django.db.models import Q
from django.core.exceptions import ImproperlyConfigured
from avocado.conf import settings
__all__ = ('ModelTree',)
DEFAULT_MODELTREE_ALIAS = 'default'
class ModelTreeNode(object):
def __init__(self, model, parent=None, rel_type=None, rel_reversed=None,
related_name=None, accessor_name=None, depth=0):
"""Defines attributes of a `model' and the relationship to the parent
model.
`name' - the `model's class name
`db_table' - the model's database table name
`pk_field' - the model's primary key field
`parent' - a reference to the parent ModelTreeNode
`parent_model' - a reference to the `parent' model, since it may be
None
`rel_type' - denotes the _kind_ of relationship with the
following possibilities: 'manytomany', 'onetoone', or 'foreignkey'.
`rel_reversed' - denotes whether this node was derived from a
forward relationship (an attribute lives on the parent model) or
a reverse relationship (an attribute lives on this model).
`related_name' - is the query string representation which is used
when querying via the ORM.
`accessor_name' - can be used when accessing the model object's
attributes e.g. getattr(obj, accessor_name). this is relative to
the parent model.
`depth' - the depth of this node relative to the root (zero-based
index)
`children' - a list containing the child nodes
"""
self.model = model
self.name = model.__name__
self.db_table = model._meta.db_table
self.pk_field = model._meta.pk.column
self.parent = parent
self.parent_model = parent and parent.model or None
self.rel_type = rel_type
self.rel_reversed = rel_reversed
self.related_name = related_name
self.accessor_name = accessor_name
self.depth = depth
self.children = []
def __str__(self):
return '%s via %s' % (self.name, self.parent_model.__name__)
def _get_m2m_db_table(self):
f = getattr(self.parent_model, self.accessor_name)
if self.rel_reversed:
return f.related.field.m2m_db_table()
else:
return f.field.m2m_db_table()
m2m_db_table = property(_get_m2m_db_table)
def _get_m2m_field(self):
f = getattr(self.parent_model, self.accessor_name)
if self.rel_reversed:
return f.related.field.m2m_column_name()
else:
return f.field.m2m_column_name()
m2m_field = property(_get_m2m_field)
def _get_m2m_reverse_field(self):
f = getattr(self.parent_model, self.accessor_name)
if self.rel_reversed:
return f.related.field.m2m_reverse_name()
else:
return f.field.m2m_reverse_name()
m2m_reverse_field = property(_get_m2m_reverse_field)
def _get_foreignkey_field(self):
f = getattr(self.parent_model, self.accessor_name)
if self.rel_reversed:
return f.related.field.column
else:
return f.field.column
foreignkey_field = property(_get_foreignkey_field)
def _get_join_connections(self):
"""Returns a list of connections that need to be added to a
QuerySet object that properly joins this model and the parent.
"""
if not hasattr(self, '_join_connections'):
connections = []
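            # each connection is a 4-tuple of the form
            # (lhs_table, table, lhs_column, column), as consumed by the old
            # QuerySet.query.join() API used in add_joins() below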
# setup initial FROM clause
connections.append((None, self.parent.db_table, None, None))
# setup two connections for m2m
if self.rel_type == 'manytomany':
c1 = (
self.parent.db_table,
self.m2m_db_table,
self.parent.pk_field,
self.rel_reversed and self.m2m_reverse_field or \
self.m2m_field,
)
c2 = (
self.m2m_db_table,
self.db_table,
self.rel_reversed and self.m2m_field or \
self.m2m_reverse_field,
self.pk_field,
)
connections.append(c1)
connections.append(c2)
else:
c1 = (
self.parent.db_table,
self.db_table,
self.rel_reversed and self.parent.pk_field or \
self.foreignkey_field,
self.rel_reversed and self.foreignkey_field or \
self.parent.pk_field,
)
connections.append(c1)
self._join_connections = connections
return self._join_connections
join_connections = property(_get_join_connections)
def remove_child(self, model):
for i, cnode in enumerate(self.children):
if cnode.model is model:
return self.children.pop(i)
class ModelTree(object):
"""A class to handle building and parsing a tree structure given a model.
`root_model' - the model of interest in which everything is relatively
defined
`exclude' - a list of models that are not to be added to the tree
"""
def __init__(self, root_model, exclude=(), routes=()):
self.root_model = self._get_model(root_model)
self.exclude = map(self._get_model, exclude)
self._rts, self._tos = self._build_routes(routes)
self._tree_hash = {}
def check(self, queryset):
if queryset.model is self.root_model:
return True
return False
def _get_model(self, label):
# model class
if inspect.isclass(label) and issubclass(label, models.Model):
return label
# passed as a label string
elif isinstance(label, basestring):
app_label, model_label = label.lower().split('.')
model = models.get_model(app_label, model_label)
if model:
return model
raise TypeError, 'model "%s" could not be found' % label
def _build_routes(self, routes):
"""
Routes provide a means of specifying JOINs between two tables.
The minimum information necessary to define an explicit JOIN is as
follows:
'from_label' - defines the model on the right side of the join
'to_label' - defines the model on the left side of the join
'join_field' - defines the field in which the join will occur
'symmetrical' - defines whether the same join will be constructed
if the 'from_model' and 'to_model' are reversed
"""
rts = {}
tos = {}
for route in routes:
# unpack
from_label, to_label, join_field, symmetrical = route
# get models
from_model = self._get_model(from_label)
to_model = self._get_model(to_label)
# get field
if join_field is not None:
model_name, field_name = join_field.split('.')
model_name = model_name.lower()
if model_name == from_model.__name__.lower():
field = from_model._meta.get_field_by_name(field_name)[0]
elif model_name == to_model.__name__.lower():
field = to_model._meta.get_field_by_name(field_name)[0]
else:
raise TypeError, 'model for join_field, "%s", does not match' % field_name
if field is None:
                    raise TypeError, 'field "%s" not found' % field_name
else:
field = None
if field:
rts[(from_model, to_model)] = field
if symmetrical:
rts[(to_model, from_model)] = field
else:
tos[to_model] = from_model
return rts, tos
def _filter_one2one(self, field):
"""Tests if this field is a OneToOneField. If a route exists for this
        field's model and its target model, ensure this is the field that
        should be used to join the two tables.
"""
if isinstance(field, models.OneToOneField):
# route has been defined with a specific field required
tup = (field.model, field.rel.to)
# skip if not the correct field
if self._rts.has_key(tup) and self._rts.get(tup) is not field:
return
return field
def _filter_related_one2one(self, rel):
"""Tests if this RelatedObject represents a OneToOneField. If a route
        exists for this field's model and its target model, ensure this is
        the field that should be used to join the two tables.
"""
field = rel.field
if isinstance(field, models.OneToOneField):
# route has been defined with a specific field required
tup = (rel.model, field.model)
# skip if not the correct field
if self._rts.has_key(tup) and self._rts.get(tup) is not field:
return
return rel
def _filter_fk(self, field):
"""Tests if this field is a ForeignKey. If a route exists for this
        field's model and its target model, ensure this is the field that
        should be used to join the two tables.
"""
if isinstance(field, models.ForeignKey):
# route has been defined with a specific field required
tup = (field.model, field.rel.to)
# skip if not the correct field
if self._rts.has_key(tup) and self._rts.get(tup) is not field:
return
return field
def _filter_related_fk(self, rel):
"""Tests if this RelatedObject represents a ForeignKey. If a route
        exists for this field's model and its target model, ensure this is
        the field that should be used to join the two tables.
"""
field = rel.field
if isinstance(field, models.ForeignKey):
# route has been defined with a specific field required
tup = (rel.model, field.model)
# skip if not the correct field
if self._rts.has_key(tup) and self._rts.get(tup) is not field:
return
return rel
def _filter_m2m(self, field):
"""Tests if this field is a ManyToManyField. If a route exists for this
        field's model and its target model, ensure this is the field that
        should be used to join the two tables.
"""
if isinstance(field, models.ManyToManyField):
# route has been defined with a specific field required
tup = (field.model, field.rel.to)
# skip if not the correct field
if self._rts.has_key(tup) and self._rts.get(tup) is not field:
return
return field
def _filter_related_m2m(self, rel):
"""Tests if this RelatedObject represents a ManyToManyField. If a route
        exists for this field's model and its target model, ensure this is
        the field that should be used to join the two tables.
"""
field = rel.field
if isinstance(field, models.ManyToManyField):
# route has been defined with a specific field required
tup = (rel.model, field.model)
# skip if not the correct field
if self._rts.has_key(tup) and self._rts.get(tup) is not field:
return
return rel
def _add_node(self, parent, model, rel_type, rel_reversed, related_name,
accessor_name, depth):
"""Adds a node to the tree only if a node of the same `model' does not
already exist in the tree with smaller depth. If the node is added, the
tree traversal continues finding the node's relations.
Conditions in which the node will fail to be added:
- the model is excluded completely
- the model is going back the same path it came from
- the model is circling back to the root_model
- the model does not come from the parent.model (via _tos)
"""
exclude = set(self.exclude + [parent.parent_model, self.root_model])
# ignore excluded models and prevent circular paths
if model in exclude:
return
# if a route exists, only allow the model to be added if coming from the
# specified parent.model
if self._tos.has_key(model) and self._tos.get(model) is not parent.model:
return
node_hash = self._tree_hash.get(model, None)
# don't add node if a path with a shorter depth exists. this is applied
# after the correct join has been determined. generally if a route is
# defined for relation, this will never be an issue since there would
# only be one path available. if a route is not defined, the shorter
# path will be found
if not node_hash or node_hash['depth'] > depth:
if node_hash:
node_hash['parent'].remove_child(model)
node = ModelTreeNode(model, parent, rel_type, rel_reversed,
related_name, accessor_name, depth)
self._tree_hash[model] = {'parent': parent, 'depth': depth,
'node': node}
node = self._find_relations(node, depth)
parent.children.append(node)
del node
def _find_relations(self, node, depth=0):
"""Finds all relations given a node.
NOTE: the many-to-many relations are evaluated first to prevent
'through' models being bound as a ForeignKey relationship.
"""
depth += 1
model = node.model
# determine relational fields to determine paths
forward_fields = model._meta.fields
reverse_fields = model._meta.get_all_related_objects()
forward_o2o = filter(self._filter_one2one, forward_fields)
reverse_o2o = filter(self._filter_related_one2one, reverse_fields)
forward_fk = filter(self._filter_fk, forward_fields)
reverse_fk = filter(self._filter_related_fk, reverse_fields)
forward_m2m = filter(self._filter_m2m, model._meta.many_to_many)
reverse_m2m = filter(self._filter_related_m2m, model._meta.get_all_related_many_to_many_objects())
# iterate m2m relations
for f in forward_m2m:
kwargs = {
'parent': node,
'model': f.rel.to,
'rel_type': 'manytomany',
'rel_reversed': False,
'related_name': f.name,
'accessor_name': f.name,
'depth': depth,
}
self._add_node(**kwargs)
# iterate over related m2m fields
for r in reverse_m2m:
kwargs = {
'parent': node,
'model': r.model,
'rel_type': 'manytomany',
'rel_reversed': True,
'related_name': r.field.related_query_name(),
'accessor_name': r.get_accessor_name(),
'depth': depth,
}
self._add_node(**kwargs)
# iterate over one2one fields
for f in forward_o2o:
kwargs = {
'parent': node,
'model': f.rel.to,
'rel_type': 'onetoone',
'rel_reversed': False,
'related_name': f.name,
'accessor_name': f.name,
'depth': depth,
}
self._add_node(**kwargs)
# iterate over related one2one fields
for r in reverse_o2o:
kwargs = {
'parent': node,
'model': r.model,
'rel_type': 'onetoone',
'rel_reversed': True,
'related_name': r.field.related_query_name(),
'accessor_name': r.get_accessor_name(),
'depth': depth,
}
self._add_node(**kwargs)
# iterate over fk fields
for f in forward_fk:
kwargs = {
'parent': node,
'model': f.rel.to,
'rel_type': 'foreignkey',
'rel_reversed': False,
'related_name': f.name,
'accessor_name': f.name,
'depth': depth,
}
self._add_node(**kwargs)
# iterate over related foreign keys
for r in reverse_fk:
kwargs = {
'parent': node,
'model': r.model,
'rel_type': 'foreignkey',
'rel_reversed': True,
'related_name': r.field.related_query_name(),
'accessor_name': r.get_accessor_name(),
'depth': depth,
}
self._add_node(**kwargs)
return node
def _get_root_node(self):
"Sets the `root_node' and implicitly builds the entire tree."
if not hasattr(self, '_root_node'):
node = ModelTreeNode(self.root_model)
self._root_node = self._find_relations(node)
self._tree_hash[self.root_model] = {'parent': None, 'depth': 0,
'node': self._root_node}
return self._root_node
root_node = property(_get_root_node)
def _find_path(self, model, node, node_path=[]):
if node.model == model:
return node_path
for cnode in node.children:
mpath = self._find_path(model, cnode, node_path + [cnode])
if mpath:
return mpath
def path_to(self, model):
"Returns a list of nodes thats defines the path of traversal."
model = self._get_model(model)
return self._find_path(model, self.root_node)
def path_to_with_root(self, model):
"""Returns a list of nodes thats defines the path of traversal
including the root node.
"""
model = self._get_model(model)
return self._find_path(model, self.root_node, [self.root_node])
def get_node_by_model(self, model):
"Finds the node with the specified model."
model = self._get_model(model)
if not self._tree_hash:
self.root_node
val = self._tree_hash.get(model, None)
if val is None:
return
return val['node']
def query_string(self, node_path, field_name, operator=None):
"Returns a query string given a path"
toks = [n.related_name for n in node_path] + [field_name]
if operator is not None:
toks.append(operator)
return str('__'.join(toks))
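    # Illustrative example (hypothetical models): if the path to the target
    # model goes through one node whose related_name is 'employee', then
    # query_string(nodes, 'name', 'iexact') returns 'employee__name__iexact'.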
def q(self, node_path, field_name, value, operator=None):
"Returns a Q object."
key = self.query_string(node_path, field_name, operator)
return Q(**{key: value})
def accessor_names(self, node_path):
"""Returns a list of the accessor names given a list of nodes. This is
most useful when needing to dynamically access attributes starting from
an instance of the `root_node' object.
"""
return [n.accessor_name for n in node_path]
def get_all_join_connections(self, node_path):
"""Returns a list of JOIN connections that can be manually applied to a
QuerySet object, e.g.:
queryset = SomeModel.objects.all()
modeltree = ModelTree(SomeModel)
nodes = modeltree.path_to(SomeOtherModel)
conns = modeltree.get_all_join_connections(nodes)
for c in conns:
queryset.query.join(c, promote=True)
This allows for the ORM to handle setting up the JOINs which may be
different depending the QuerySet being altered.
"""
connections = []
for i,node in enumerate(node_path):
if i == 0:
connections.extend(node.join_connections)
else:
connections.extend(node.join_connections[1:])
return connections
def add_joins(self, model, queryset, **kwargs):
model = self._get_model(model)
clone = queryset._clone()
nodes = self.path_to(model)
conns = self.get_all_join_connections(nodes)
for c in conns:
clone.query.join(c, **kwargs)
return clone
def print_path(self, node=None, depth=0):
"Traverses the entire tree and prints a hierarchical view to stdout."
if node is None:
node = self.root_node
if node:
print '- ' * depth * 2, '"%s"' % node.name, 'at a depth of', node.depth
if node.children:
depth += 1
for x in node.children:
self.print_path(x, depth)
def get_accessor_pairs(self, node_path):
"Used for testing purposes."
accessor_names = self.accessor_names(node_path)
node_path = node_path[:-1] # don't need the last item
if len(node_path) == 0 or node_path[0] is not self.root_node:
node_path = [self.root_node] + node_path
else:
accessor_names = accessor_names[1:]
return zip(node_path, accessor_names)
def get_queryset(self):
"Returns a QuerySet relative to the ``root_model``."
return self.root_model.objects.all()
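# Illustrative usage of ModelTree (a sketch with hypothetical models Office
# and Employee, where Employee has a ForeignKey to Office):
#
#     tree = ModelTree(Office)
#     nodes = tree.path_to(Employee)
#     q = tree.q(nodes, 'name', 'Alice')   # equivalent to Q(employee__name='Alice')
#     qs = tree.add_joins(Employee, Office.objects.all(), promote=True)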
class LazyModelTree(object):
def __init__(self, modeltrees):
self.modeltrees = modeltrees
self._modeltrees = {}
def __getitem__(self, alias):
if not self.modeltrees:
raise ImproperlyConfigured, 'You must at least specify the "%s" ' \
'modeltree config' % DEFAULT_MODELTREE_ALIAS
if alias not in self._modeltrees:
try:
kwargs = self.modeltrees[alias]
except KeyError:
raise KeyError, 'No modeltree settings defined for "%s"' % alias
self._modeltrees[alias] = ModelTree(**kwargs)
return self._modeltrees[alias]
trees = LazyModelTree(settings.MODELTREES)
|
bsd-3-clause
| -7,547,567,735,134,848,000
| 36.160535
| 106
| 0.563451
| false
|
geminy/aidear
|
snippets/cppfunc/cppcheck/cppcheck-1.80/addons/cert.py
|
1
|
2959
|
#!/usr/bin/env python
#
# Cert: Some extra CERT checkers
#
# Cppcheck itself handles many CERT rules. Cppcheck warns when there is undefined behaviour.
#
# Example usage of this addon (scan a sourcefile main.cpp)
# cppcheck --dump main.cpp
# python cert.py main.cpp.dump
import cppcheckdata
import sys
import re
def reportError(token, severity, msg):
sys.stderr.write(
'[' + token.file + ':' + str(token.linenr) + '] (' + severity + ') cert.py: ' + msg + '\n')
def isUnpackedStruct(var):
decl = var.typeStartToken
while decl and decl.isName:
if decl.str == 'struct':
structScope = decl.next.typeScope
if structScope:
linenr = int(structScope.classStart.linenr)
for line in open(structScope.classStart.file):
linenr -= 1
if linenr == 0:
return True
if re.match(r'#pragma\s+pack\s*\(', line):
return False
break
decl = decl.next
return False
def isLocalUnpackedStruct(arg):
if arg and arg.str == '&' and not arg.astOperand2:
arg = arg.astOperand1
return arg and arg.variable and (arg.variable.isLocal or arg.variable.isArgument) and isUnpackedStruct(arg.variable)
def isBitwiseOp(token):
return token and (token.str in {'&', '|', '^'})
def isComparisonOp(token):
return token and (token.str in {'==', '!=', '>', '>=', '<', '<='})
# EXP42-C
# do not compare padding data
def exp42(data):
for token in data.tokenlist:
if token.str != '(' or not token.astOperand1:
continue
arg1 = None
arg2 = None
if token.astOperand2 and token.astOperand2.str == ',':
if token.astOperand2.astOperand1 and token.astOperand2.astOperand1.str == ',':
arg1 = token.astOperand2.astOperand1.astOperand1
arg2 = token.astOperand2.astOperand1.astOperand2
if token.astOperand1.str == 'memcmp' and (isLocalUnpackedStruct(arg1) or isLocalUnpackedStruct(arg2)):
reportError(
token, 'style', "EXP42-C Comparison of struct padding data " +
"(fix either by packing the struct using '#pragma pack' or by rewriting the comparison)")
# EXP46-C
# Do not use a bitwise operator with a Boolean-like operand
# int x = (a == b) & c;
def exp46(data):
for token in data.tokenlist:
if isBitwiseOp(token) and (isComparisonOp(token.astOperand1) or isComparisonOp(token.astOperand2)):
reportError(
token, 'style', 'EXP46-C Bitwise operator is used with a Boolean-like operand')
for arg in sys.argv[1:]:
print('Checking ' + arg + '...')
data = cppcheckdata.parsedump(arg)
for cfg in data.configurations:
if len(data.configurations) > 1:
print('Checking ' + arg + ', config "' + cfg.name + '"...')
exp42(cfg)
exp46(cfg)
|
gpl-3.0
| -200,426,525,887,534,100
| 32.247191
| 120
| 0.604258
| false
|
Azure/azure-sdk-for-python
|
sdk/recoveryservices/azure-mgmt-recoveryservices/azure/mgmt/recoveryservices/aio/operations/_replication_usages_operations.py
|
1
|
5558
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ReplicationUsagesOperations:
"""ReplicationUsagesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.recoveryservices.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
resource_group_name: str,
vault_name: str,
**kwargs
) -> AsyncIterable["_models.ReplicationUsageList"]:
"""Fetches the replication usages of the vault.
:param resource_group_name: The name of the resource group where the recovery services vault is
present.
:type resource_group_name: str
:param vault_name: The name of the recovery services vault.
:type vault_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ReplicationUsageList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.recoveryservices.models.ReplicationUsageList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ReplicationUsageList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vaultName': self._serialize.url("vault_name", vault_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ReplicationUsageList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/Subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/replicationUsages'} # type: ignore
|
mit
| -4,773,754,817,970,686,000
| 46.504274
| 187
| 0.644476
| false
|
calebbrown/calebcc
|
feedgenerator/feeds.py
|
1
|
8744
|
"""
Syndication feed generation library -- used for generating RSS, etc.
Sample usage:
>>> from django.utils import feedgenerator
>>> feed = feedgenerator.Rss201rev2Feed(
... title=u"Poynter E-Media Tidbits",
... link=u"http://www.poynter.org/column.asp?id=31",
... description=u"A group Weblog by the sharpest minds in online media/journalism/publishing.",
... language=u"en",
... )
>>> feed.add_item(
... title="Hello",
... link=u"http://www.holovaty.com/test/",
... description="Testing."
... )
>>> fp = open('test.rss', 'w')
>>> feed.write(fp, 'utf-8')
>>> fp.close()
For definitions of the different versions of RSS, see:
http://diveintomark.org/archives/2004/02/04/incompatible-rss
"""
from xmlutils import SimplerXMLGenerator
from utils import rfc2822_date, rfc3339_date, get_tag_uri
from base import SyndicationFeed, Enclosure
class RssFeed(SyndicationFeed):
mime_type = 'application/rss+xml'
def write(self, outfile, encoding):
handler = SimplerXMLGenerator(outfile, encoding)
handler.startDocument()
handler.startElement(u"rss", self.rss_attributes())
handler.startElement(u"channel", self.root_attributes())
self.add_root_elements(handler)
self.write_items(handler)
self.endChannelElement(handler)
handler.endElement(u"rss")
def rss_attributes(self):
return {u"version": self._version,
u"xmlns:atom": u"http://www.w3.org/2005/Atom"}
def write_items(self, handler):
for item in self.items:
handler.startElement(u'item', self.item_attributes(item))
self.add_item_elements(handler, item)
handler.endElement(u"item")
def add_root_elements(self, handler):
handler.addQuickElement(u"title", self.feed['title'])
handler.addQuickElement(u"link", self.feed['link'])
handler.addQuickElement(u"description", self.feed['description'])
handler.addQuickElement(u"atom:link", None, {u"rel": u"self", u"href": self.feed['feed_url']})
if self.feed['language'] is not None:
handler.addQuickElement(u"language", self.feed['language'])
for cat in self.feed['categories']:
handler.addQuickElement(u"category", cat)
if self.feed['feed_copyright'] is not None:
handler.addQuickElement(u"copyright", self.feed['feed_copyright'])
handler.addQuickElement(u"lastBuildDate", rfc2822_date(self.latest_post_date()).decode('utf-8'))
if self.feed['ttl'] is not None:
handler.addQuickElement(u"ttl", self.feed['ttl'])
def endChannelElement(self, handler):
handler.endElement(u"channel")
class RssUserland091Feed(RssFeed):
_version = u"0.91"
def add_item_elements(self, handler, item):
handler.addQuickElement(u"title", item['title'])
handler.addQuickElement(u"link", item['link'])
if item['description'] is not None:
handler.addQuickElement(u"description", item['description'])
class Rss201rev2Feed(RssFeed):
# Spec: http://blogs.law.harvard.edu/tech/rss
_version = u"2.0"
def add_item_elements(self, handler, item):
handler.addQuickElement(u"title", item['title'])
handler.addQuickElement(u"link", item['link'])
if item['description'] is not None:
handler.addQuickElement(u"description", item['description'])
# Author information.
if item["author_name"] and item["author_email"]:
handler.addQuickElement(u"author", "%s (%s)" % \
(item['author_email'], item['author_name']))
elif item["author_email"]:
handler.addQuickElement(u"author", item["author_email"])
elif item["author_name"]:
handler.addQuickElement(u"dc:creator", item["author_name"], {u"xmlns:dc": u"http://purl.org/dc/elements/1.1/"})
if item['pubdate'] is not None:
handler.addQuickElement(u"pubDate", rfc2822_date(item['pubdate']).decode('utf-8'))
if item['comments'] is not None:
handler.addQuickElement(u"comments", item['comments'])
if item['unique_id'] is not None:
handler.addQuickElement(u"guid", item['unique_id'])
if item['ttl'] is not None:
handler.addQuickElement(u"ttl", item['ttl'])
# Enclosure.
if item['enclosure'] is not None:
handler.addQuickElement(u"enclosure", '',
{u"url": item['enclosure'].url, u"length": item['enclosure'].length,
u"type": item['enclosure'].mime_type})
# Categories.
for cat in item['categories']:
handler.addQuickElement(u"category", cat)
class Atom1Feed(SyndicationFeed):
# Spec: http://atompub.org/2005/07/11/draft-ietf-atompub-format-10.html
mime_type = 'application/atom+xml; charset=utf8'
ns = u"http://www.w3.org/2005/Atom"
def write(self, outfile, encoding):
handler = SimplerXMLGenerator(outfile, encoding)
handler.startDocument()
handler.startElement(u'feed', self.root_attributes())
self.add_root_elements(handler)
self.write_items(handler)
handler.endElement(u"feed")
def root_attributes(self):
if self.feed['language'] is not None:
return {u"xmlns": self.ns, u"xml:lang": self.feed['language']}
else:
return {u"xmlns": self.ns}
def add_root_elements(self, handler):
handler.addQuickElement(u"title", self.feed['title'])
handler.addQuickElement(u"link", "", {u"rel": u"alternate", u"href": self.feed['link']})
if self.feed['feed_url'] is not None:
handler.addQuickElement(u"link", "", {u"rel": u"self", u"href": self.feed['feed_url']})
handler.addQuickElement(u"id", self.feed['id'])
handler.addQuickElement(u"updated", rfc3339_date(self.latest_post_date()).decode('utf-8'))
if self.feed['author_name'] is not None:
handler.startElement(u"author", {})
handler.addQuickElement(u"name", self.feed['author_name'])
if self.feed['author_email'] is not None:
handler.addQuickElement(u"email", self.feed['author_email'])
if self.feed['author_link'] is not None:
handler.addQuickElement(u"uri", self.feed['author_link'])
handler.endElement(u"author")
if self.feed['subtitle'] is not None:
handler.addQuickElement(u"subtitle", self.feed['subtitle'])
for cat in self.feed['categories']:
handler.addQuickElement(u"category", "", {u"term": cat})
if self.feed['feed_copyright'] is not None:
handler.addQuickElement(u"rights", self.feed['feed_copyright'])
def write_items(self, handler):
for item in self.items:
handler.startElement(u"entry", self.item_attributes(item))
self.add_item_elements(handler, item)
handler.endElement(u"entry")
def add_item_elements(self, handler, item):
handler.addQuickElement(u"title", item['title'])
handler.addQuickElement(u"link", u"", {u"href": item['link'], u"rel": u"alternate"})
if item['pubdate'] is not None:
handler.addQuickElement(u"updated", rfc3339_date(item['pubdate']).decode('utf-8'))
# Author information.
if item['author_name'] is not None:
handler.startElement(u"author", {})
handler.addQuickElement(u"name", item['author_name'])
if item['author_email'] is not None:
handler.addQuickElement(u"email", item['author_email'])
if item['author_link'] is not None:
handler.addQuickElement(u"uri", item['author_link'])
handler.endElement(u"author")
# Unique ID.
if item['unique_id'] is not None:
unique_id = item['unique_id']
else:
unique_id = get_tag_uri(item['link'], item['pubdate'])
handler.addQuickElement(u"id", unique_id)
# Summary.
if item['description'] is not None:
handler.addQuickElement(u"summary", item['description'], {u"type": u"html"})
# Enclosure.
if item['enclosure'] is not None:
handler.addQuickElement(u"link", '',
{u"rel": u"enclosure",
u"href": item['enclosure'].url,
u"length": item['enclosure'].length,
u"type": item['enclosure'].mime_type})
# Categories.
for cat in item['categories']:
handler.addQuickElement(u"category", u"", {u"term": cat})
# Rights.
if item['item_copyright'] is not None:
handler.addQuickElement(u"rights", item['item_copyright'])
|
bsd-3-clause
| -3,889,700,314,081,301,500
| 41.653659
| 123
| 0.613564
| false
|
KnechtRootrechtCH/Mimir
|
content/Mods/XVM/XVM_Base/res_mods_content/mods/xfw_packages/xvm_main/python/topclans.py
|
1
|
2348
|
""" XVM (c) https://modxvm.com 2013-2020 """
import config
from logger import *
# PUBLIC
def getClanInfo(clanAbbrev):
global _clansInfo
if _clansInfo is None:
return None
rankWGM = None
rankWSH = None
topWGM = _clansInfo.getTopWGMClanInfo(clanAbbrev)
if topWGM is not None:
if not (0 < topWGM['rank'] <= config.networkServicesSettings.topClansCountWgm):
topWGM = None
topWSH = _clansInfo.getTopWSHClanInfo(clanAbbrev)
if topWSH is not None:
if not (0 < topWSH['rank'] <= config.networkServicesSettings.topClansCountWsh):
topWSH = None
    # fall back to persistent info if neither top list has the clan; otherwise take the minimal (best) rank
if topWGM is None and topWSH is None:
return _clansInfo.getPersistClanInfo(clanAbbrev)
else:
if topWGM is None:
return topWSH
elif topWSH is None:
return topWGM
else:
return min(topWGM, topWSH, key=lambda x: x['rank'])
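# Illustrative sketch (not part of the original module): how the rank merge
# above behaves for a hypothetical clan ranked 3rd in WGM and 7th in WSH:
#   topWGM = {'rank': 3, 'clan_id': 1}
#   topWSH = {'rank': 7, 'clan_id': 1}
#   min(topWGM, topWSH, key=lambda x: x['rank'])  # -> the WGM entry (rank 3)
# i.e. the entry with the numerically smallest (best) rank wins.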
def clear():
global _clansInfo
_clansInfo = None
def update(data):
if data is None:
data = {}
global _clansInfo
_clansInfo = _ClansInfo(data)
class _ClansInfo(object):
__slots__ = ('_persist', '_topWGM', '_topWSH')
def __init__(self, data):
# fix data
# TODO: rename topClans to topClansWGM in XVM API
if 'topClansWGM' not in data and 'topClans' in data:
data['topClansWGM'] = data['topClans']
del data['topClans']
self._persist = data.get('persistClans', {})
self._topWGM = data.get('topClansWGM', {})
self._topWSH = data.get('topClansWSH', {})
# DEBUG
#log(_clansInfo)
#self._topWGM['KTFI'] = {"rank":9,"clan_id":38503,"emblem":"http://static.modxvm.com/emblems/persist/{size}/38503.png"}
#self._topWSH['KTFI'] = {"rank":4,"clan_id":38503,"emblem":"http://static.modxvm.com/emblems/persist/{size}/38503.png"}
#self._persist['KTFI'] = {"rank":0,"clan_id":38503,"emblem":"http://static.modxvm.com/emblems/persist/{size}/38503.png"}
#/DEBUG
def getPersistClanInfo(self, clanAbbrev):
return self._persist.get(clanAbbrev, None)
def getTopWGMClanInfo(self, clanAbbrev):
return self._topWGM.get(clanAbbrev, None)
def getTopWSHClanInfo(self, clanAbbrev):
return self._topWSH.get(clanAbbrev, None)
_clansInfo = None
|
mit
| -2,595,296,232,771,151,400
| 30.72973
| 128
| 0.616695
| false
|
tommy-u/enable
|
enable/wx/base_window.py
|
1
|
19538
|
"""
Defines the concrete top-level Enable 'Window' class for the wxPython GUI
toolkit, based on the kiva agg driver.
"""
from __future__ import absolute_import
import sys
import time
import wx
from traits.api import Any, Instance, Trait
from traitsui.wx.menu import MakeMenu
# Relative imports
from enable.events import MouseEvent, KeyEvent, DragEvent
from enable.abstract_window import AbstractWindow
from .constants import DRAG_RESULTS_MAP, POINTER_MAP, KEY_MAP
try:
from pyface.wx.drag_and_drop import clipboard, PythonDropTarget
except ImportError:
clipboard = None
PythonDropTarget = None
#-------------------------------------------------------------------------------
# Constants:
#-------------------------------------------------------------------------------
# Number of pixels to scroll at a time:
scroll_incr = 16
# Reusable instance to avoid constructor/destructor overhead:
wx_rect = wx.Rect( 0, 0, 0, 0 )
# Default 'fake' start event for wxPython based drag operations:
default_start_event = MouseEvent()
# To conserve system resources, there is only one 'timer' per program:
system_timer = None
class EnableTimer ( wx.Timer ):
"""
This class maintains a 'work list' of scheduled components, where
each item in the list has the form: [ component, interval, timer_pop_time ]
"""
def __init__ ( self ):
wx.Timer.__init__( self )
self._work_list = []
return
def schedule ( self, component, interval ):
"Schedule a timer event for a specified component"
work_list = self._work_list
if len( work_list ) == 0:
self.Start( 5, oneShot=False )
for i, item in enumerate( work_list ):
if component is item[0]:
del work_list[i]
break
self.reschedule( component, interval )
return
def reschedule ( self, component, interval ):
"Reshedule a recurring timer event for a component"
pop_time = time.time() + interval
new_item = [ component, interval, pop_time ]
work_list = self._work_list
for i, item in enumerate( work_list ):
if pop_time < item[2]:
work_list.insert( i, new_item )
break
else:
work_list.append( new_item )
return
def cancel ( self, component ):
"Cancel any pending timer events for a component"
work_list = self._work_list
for i, item in enumerate( work_list ):
if component is item[0]:
del work_list[i]
if len( work_list ) == 0:
self.Stop()
break
return (len( work_list ) != 0)
def Notify ( self ):
"Handle a timer 'pop' event; used for performance testing purposes"
now = time.time()
work_list = self._work_list
n = len( work_list )
i = 0
while (i < n) and (now >= work_list[i][2]):
i += 1
if i > 0:
reschedule = work_list[:i]
del work_list[:i]
for component, interval, ignore in reschedule:
self.reschedule( component, interval )
component.timer = True
return
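# Illustrative sketch (not part of the original class): typical use of the
# shared timer, assuming a hypothetical component `comp` with a writable
# `timer` attribute:
#   timer = EnableTimer()
#   timer.schedule(comp, 0.1)   # sets comp.timer = True roughly every 100 ms
#   timer.cancel(comp)          # stop delivering timer pops to comp
# The work list is kept sorted by next pop time, so Notify() only needs to
# scan the head of the list on each 5 ms tick.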
class LessSuckyDropTarget(PythonDropTarget):
""" The sole purpose of this class is to override the implementation
of OnDragOver() in the parent class to NOT short-circuit return the
'default_drag_result' if the drop_source is None. (The parent class
implementation basically means that everything looks like it's OK to
drop, and the DnD handler doesn't ever get a chance to intercept or
veto.)
"""
def OnDragOver(self, x, y, default_drag_result):
# This is a cut-and-paste job of the parent class implementation.
# Please refer to its comments.
if clipboard.drop_source is not None and \
not clipboard.drop_source.allow_move:
default_drag_result = wx.DragCopy
if hasattr(self.handler, 'wx_drag_over'):
drag_result = self.handler.wx_drag_over(
x, y, clipboard.data, default_drag_result
)
else:
drag_result = default_drag_result
return drag_result
class BaseWindow(AbstractWindow):
# Screen scroll increment amount:
scroll_incr = ( wx.SystemSettings_GetMetric( wx.SYS_SCREEN_Y )
or 768 ) / 20
# Width/Height of standard scrollbars:
scrollbar_dx = wx.SystemSettings_GetMetric( wx.SYS_VSCROLL_X )
scrollbar_dy = wx.SystemSettings_GetMetric( wx.SYS_HSCROLL_Y )
_cursor_color = Any # PZW: figure out the correct type for this...
# Reference to the actual wxPython window:
control = Instance(wx.Window)
# This is set by downstream components to notify us of whether or not
# the current drag operation should return DragCopy, DragMove, or DragNone.
_drag_result = Any
def __init__(self, parent, wid=-1, pos=wx.DefaultPosition,
size=wx.DefaultSize, **traits):
AbstractWindow.__init__(self, **traits)
self._timer = None
self._mouse_captured = False
# Due to wx wonkiness, we don't reliably get cursor position from
# a wx KeyEvent. Thus, we manually keep track of when we last saw
# the mouse and use that information instead. These coordinates are
# in the wx coordinate space, i.e. pre-self._flip_y().
self._last_mouse_pos = (0, 0)
# Create the delegate:
self.control = control = self._create_control(parent, wid, pos, size)
# Set up the 'erase background' event handler:
wx.EVT_ERASE_BACKGROUND( control, self._on_erase_background )
# Set up the 'paint' event handler:
wx.EVT_PAINT( control, self._paint )
wx.EVT_SIZE( control, self._on_size )
# Set up mouse event handlers:
wx.EVT_LEFT_DOWN( control, self._on_left_down )
wx.EVT_LEFT_UP( control, self._on_left_up )
wx.EVT_LEFT_DCLICK( control, self._on_left_dclick )
wx.EVT_MIDDLE_DOWN( control, self._on_middle_down )
wx.EVT_MIDDLE_UP( control, self._on_middle_up )
wx.EVT_MIDDLE_DCLICK( control, self._on_middle_dclick )
wx.EVT_RIGHT_DOWN( control, self._on_right_down )
wx.EVT_RIGHT_UP( control, self._on_right_up )
wx.EVT_RIGHT_DCLICK( control, self._on_right_dclick )
wx.EVT_MOTION( control, self._on_mouse_move )
wx.EVT_ENTER_WINDOW( control, self._on_window_enter )
wx.EVT_LEAVE_WINDOW( control, self._on_window_leave )
wx.EVT_MOUSEWHEEL( control, self._on_mouse_wheel )
# Handle key up/down events:
wx.EVT_KEY_DOWN( control, self._on_key_pressed )
wx.EVT_KEY_UP( control, self._on_key_released )
wx.EVT_CHAR( control, self._on_character )
        # Handle window close and cleanup:
        wx.EVT_WINDOW_DESTROY(control, self._on_close)
        # Attempt to allow wxPython drag and drop events to be mapped to
        # Enable drag events:
        if PythonDropTarget is not None:
            control.SetDropTarget( LessSuckyDropTarget( self ) )
        self._drag_over = []
# In some cases, on the Mac at least, we never get an initial EVT_SIZE
# since the initial size is correct. Because of this we call _on_size
# here to initialize our bounds.
self._on_size(None)
return
def _create_control(self, parent, wid, pos = wx.DefaultPosition,
size = wx.DefaultSize):
return wx.Window(parent, wid, pos, size, style = wx.CLIP_CHILDREN |
wx.WANTS_CHARS)
def _on_close(self, event):
# Might be scrollbars or other native components under
# us that are generating this event
if event.GetWindow() == self.control:
self._gc = None
wx.EVT_ERASE_BACKGROUND(self.control, None)
wx.EVT_PAINT(self.control, None)
wx.EVT_SIZE(self.control, None)
wx.EVT_LEFT_DOWN(self.control, None)
wx.EVT_LEFT_UP(self.control, None)
wx.EVT_LEFT_DCLICK(self.control, None)
wx.EVT_MIDDLE_DOWN(self.control, None)
wx.EVT_MIDDLE_UP(self.control, None)
wx.EVT_MIDDLE_DCLICK(self.control, None)
wx.EVT_RIGHT_DOWN(self.control, None)
wx.EVT_RIGHT_UP(self.control, None)
wx.EVT_RIGHT_DCLICK(self.control, None)
wx.EVT_MOTION(self.control, None)
wx.EVT_ENTER_WINDOW(self.control, None)
wx.EVT_LEAVE_WINDOW(self.control, None)
wx.EVT_MOUSEWHEEL(self.control, None)
wx.EVT_KEY_DOWN(self.control, None)
wx.EVT_KEY_UP(self.control, None)
wx.EVT_CHAR(self.control, None)
wx.EVT_WINDOW_DESTROY(self.control, None)
self.control.SetDropTarget(None)
self.control = None
self.component.cleanup(self)
self.component.parent = None
self.component.window = None
self.component = None
return
def _flip_y ( self, y ):
"Convert from a Kiva to a wxPython y coordinate"
return int( self._size[1] - 1 - y )
def _on_erase_background ( self, event ):
pass
def _on_size ( self, event ):
dx, dy = self.control.GetSizeTuple()
# do nothing if the new and old sizes are the same
if (self.component.outer_width, self.component.outer_height) == (dx, dy):
return
self.resized = (dx, dy)
if getattr(self.component, "fit_window", False):
self.component.outer_position = [0,0]
self.component.outer_bounds = [dx, dy]
elif hasattr(self.component, "resizable"):
if "h" in self.component.resizable:
self.component.outer_x = 0
self.component.outer_width = dx
if "v" in self.component.resizable:
self.component.outer_y = 0
self.component.outer_height = dy
self.control.Refresh()
return
def _capture_mouse ( self ):
"Capture all future mouse events"
if not self._mouse_captured:
self._mouse_captured = True
self.control.CaptureMouse()
return
def _release_mouse ( self ):
"Release the mouse capture"
if self._mouse_captured:
self._mouse_captured = False
self.control.ReleaseMouse()
return
def _on_key_pressed(self, event):
handled = self._handle_key_event('key_pressed', event)
if not handled:
event.Skip()
def _on_key_released(self, event):
handled = self._handle_key_event('key_released', event)
if not handled:
event.Skip()
def _create_key_event(self, event_type, event):
""" Convert a GUI toolkit keyboard event into a KeyEvent.
"""
if self.focus_owner is None:
focus_owner = self.component
else:
focus_owner = self.focus_owner
if focus_owner is not None:
if event_type == 'character':
key = unichr(event.GetUniChar())
if not key:
return None
else:
key_code = event.GetKeyCode()
if key_code in KEY_MAP:
key = KEY_MAP.get(key_code)
else:
key = unichr(event.GetUniChar()).lower()
# Use the last-seen mouse coordinates instead of GetX/GetY due
# to wx bug.
x, y = self._last_mouse_pos
# Someday when wx does this properly, we can use these instead:
# x = event.GetX()
# y = event.GetY()
return KeyEvent(
event_type = event_type,
character = key,
alt_down = event.AltDown(),
control_down = event.ControlDown(),
shift_down = event.ShiftDown(),
x = x,
y = self._flip_y(y),
event = event,
window = self)
else:
event.Skip()
return None
def _create_mouse_event ( self, event ):
"Convert a GUI toolkit mouse event into a MouseEvent"
if event is not None:
x = event.GetX()
y = event.GetY()
self._last_mouse_pos = (x, y)
mouse_wheel = ((event.GetLinesPerAction() *
event.GetWheelRotation()) /
(event.GetWheelDelta() or 1))
# Note: The following code fixes a bug in wxPython that returns
# 'mouse_wheel' events in screen coordinates, rather than window
# coordinates:
if float(wx.VERSION_STRING[:3]) < 2.8:
if mouse_wheel != 0 and sys.platform == "win32":
x, y = self.control.ScreenToClientXY( x, y )
return MouseEvent( x = x,
y = self._flip_y( y ),
alt_down = event.AltDown(),
control_down = event.ControlDown(),
shift_down = event.ShiftDown(),
left_down = event.LeftIsDown(),
middle_down = event.MiddleIsDown(),
right_down = event.RightIsDown(),
mouse_wheel = mouse_wheel,
window = self )
# If no event specified, make one up:
x, y = wx.GetMousePosition()
x, y = self.control.ScreenToClientXY( x, y )
self._last_mouse_pos = (x, y)
return MouseEvent( x = x,
y = self._flip_y( y ),
alt_down = self.alt_pressed,
control_down = self.ctrl_pressed,
shift_down = self.shift_pressed,
left_down = False,
middle_down = False,
right_down = False,
mouse_wheel = 0,
window = self)
def _create_gc(self, size, pix_format=None):
"Create a Kiva graphics context of a specified size"
raise NotImplementedError
def _redraw(self, coordinates=None):
"Request a redraw of the window"
if coordinates is None:
if self.control:
self.control.Refresh(False)
else:
xl, yb, xr, yt = coordinates
rect = wx_rect
rect.SetX( int( xl ) )
rect.SetY( int( self._flip_y( yt - 1 ) ) )
rect.SetWidth( int( xr - xl ) )
rect.SetHeight( int( yt - yb ) )
if self.control:
self.control.Refresh(False, rect)
return
def _get_control_size ( self ):
"Get the size of the underlying toolkit control"
result = None
if self.control:
result = self.control.GetSizeTuple()
return result
def _window_paint ( self, event):
"Do a GUI toolkit specific screen update"
raise NotImplementedError
def set_pointer ( self, pointer ):
"Set the current pointer (i.e. cursor) shape"
ptr = POINTER_MAP[ pointer ]
if type( ptr ) is int:
POINTER_MAP[ pointer ] = ptr = wx.StockCursor( ptr )
self.control.SetCursor( ptr )
return
def set_tooltip ( self, tooltip ):
"Set the current tooltip for the window"
wx.ToolTip_Enable( False )
self.control.SetToolTip( wx.ToolTip( tooltip ) )
wx.ToolTip_Enable( True )
return
def set_timer_interval ( self, component, interval ):
""" Set up or cancel a timer for a specified component. To cancel the
timer, set interval=None """
global system_timer
if interval is None:
if ((system_timer is not None) and
(not system_timer.cancel( component ))):
system_timer = None
else:
if system_timer is None:
system_timer = EnableTimer()
system_timer.schedule( component, interval )
return
def _set_focus ( self ):
"Sets the keyboard focus to this window"
self.control.SetFocus()
return
def screen_to_window(self, x, y):
pt = wx.Point(x,y)
x,y = self.control.ScreenToClient(pt)
y = self._flip_y(y)
return x,y
def get_pointer_position(self):
"Returns the current pointer position in local window coordinates"
pos = wx.GetMousePosition()
return self.screen_to_window(pos.x, pos.y)
def set_drag_result(self, result):
if result not in DRAG_RESULTS_MAP:
raise RuntimeError, "Unknown drag result '%s'" % result
self._drag_result = DRAG_RESULTS_MAP[result]
return
def wx_dropped_on ( self, x, y, drag_object, drop_result ):
"Handle wxPython drag and drop events"
# Process the 'dropped_on' event for the object(s) it was dropped on:
y = self._flip_y(y)
drag_event = DragEvent(x=x, y=y, obj=drag_object, window=self)
self._drag_result = wx.DragNone
if self.component.is_in(x, y):
self.component.dispatch(drag_event, "dropped_on")
        # If a downstream component wants to express that it handled the
        # drop, it will have set self._drag_result (via set_drag_result)
        # during the dispatch above.
return self._drag_result
def wx_drag_over ( self, x, y, drag_object, drag_result ):
y = self._flip_y( y )
drag_over_event = DragEvent( x = x,
y = y,
x0 = 0.0,
y0 = 0.0,
copy = drag_result != wx.DragMove,
obj = drag_object,
start_event = default_start_event,
window = self )
# By default, don't indicate that we can be dropped on. It is up
# to the component to set this correctly.
self._drag_result = wx.DragNone
if self.component.is_in(x, y):
self.component.dispatch(drag_over_event, "drag_over")
return self._drag_result
def wx_drag_leave ( self, drag_object ):
drag_leave_event = DragEvent( x = 0.0,
y = 0.0,
x0 = 0.0,
y0 = 0.0,
copy = False,
obj = drag_object,
start_event = default_start_event,
window = self )
self.component.dispatch(drag_leave_event, "drag_leave")
return
def create_menu ( self, menu_definition, owner ):
"Create a wxMenu from a string description"
return MakeMenu( menu_definition, owner, True, self.control )
def popup_menu ( self, menu, x, y ):
"Pop-up a wxMenu at a specified location"
self.control.PopupMenuXY( menu.menu, int(x), int( self._flip_y(y) ) )
return
# EOF
|
bsd-3-clause
| -7,273,876,677,575,929,000
| 36.144487
| 81
| 0.542788
| false
|
joshmoore/zeroc-ice
|
java/test/Freeze/fileLock/run.py
|
1
|
1622
|
#!/usr/bin/env python
# **********************************************************************
#
# Copyright (c) 2003-2011 ZeroC, Inc. All rights reserved.
#
# This copy of Ice is licensed to you under the terms described in the
# ICE_LICENSE file included in this distribution.
#
# **********************************************************************
import os, sys
path = [ ".", "..", "../..", "../../..", "../../../.." ]
head = os.path.dirname(sys.argv[0])
if len(head) > 0:
path = [os.path.join(head, p) for p in path]
path = [os.path.abspath(p) for p in path if os.path.exists(os.path.join(p, "scripts", "TestUtil.py")) ]
if len(path) == 0:
raise "can't find toplevel directory!"
sys.path.append(os.path.join(path[0]))
from scripts import *
dbdir = os.path.join(os.getcwd(), "db")
TestUtil.cleanDbDir(dbdir)
print "testing Freeze file lock...",
sys.stdout.flush()
client = os.path.join("test.Freeze.fileLock.Client")
clientFail = os.path.join("test.Freeze.fileLock.ClientFail")
clientExe = TestUtil.startClient(client, "", None, None, False)
clientExe.expect('File lock acquired\..*')
clientFailExe = TestUtil.startClient(clientFail, "", None, None, False)
clientFailExe.expect('File lock not acquired.')
clientFailExe.wait()
# send some output to client to terminate it.
clientExe.sendline('go')
clientExe.expect('File lock released.')
clientExe.wait()
# The lock is gone try to acquire it again.
clientExe = TestUtil.startClient(client, "", None, None, False)
clientExe.expect('File lock acquired\..*')
clientExe.sendline('go')
clientExe.expect('File lock released.')
clientExe.wait()
print "ok"
|
gpl-2.0
| -2,917,443,800,651,901,400
| 31.44
| 103
| 0.634402
| false
|
nelseric/qmk_firmware
|
lib/python/qmk/cli/new/keymap.py
|
16
|
1884
|
"""This script automates the copying of the default keymap into your own keymap.
"""
import shutil
from pathlib import Path
import qmk.path
from qmk.decorators import automagic_keyboard, automagic_keymap
from milc import cli
@cli.argument('-kb', '--keyboard', help='Specify keyboard name. Example: 1upkeyboards/1up60hse')
@cli.argument('-km', '--keymap', help='Specify the name for the new keymap directory')
@cli.subcommand('Creates a new keymap for the keyboard of your choosing')
@automagic_keyboard
@automagic_keymap
def new_keymap(cli):
"""Creates a new keymap for the keyboard of your choosing.
"""
# ask for user input if keyboard or keymap was not provided in the command line
keyboard = cli.config.new_keymap.keyboard if cli.config.new_keymap.keyboard else input("Keyboard Name: ")
keymap = cli.config.new_keymap.keymap if cli.config.new_keymap.keymap else input("Keymap Name: ")
# generate keymap paths
kb_path = Path('keyboards') / keyboard
keymap_path = qmk.path.keymap(keyboard)
keymap_path_default = keymap_path / 'default'
keymap_path_new = keymap_path / keymap
# check directories
if not kb_path.exists():
cli.log.error('Keyboard %s does not exist!', kb_path)
return False
if not keymap_path_default.exists():
cli.log.error('Keyboard default %s does not exist!', keymap_path_default)
return False
if keymap_path_new.exists():
cli.log.error('Keymap %s already exists!', keymap_path_new)
return False
# create user directory with default keymap files
shutil.copytree(keymap_path_default, keymap_path_new, symlinks=True)
# end message to user
cli.log.info("%s keymap directory created in: %s", keymap, keymap_path_new)
cli.log.info("Compile a firmware with your new keymap by typing: \n\n\tqmk compile -kb %s -km %s\n", keyboard, keymap)
|
gpl-2.0
| 5,085,182,209,697,416,000
| 39.085106
| 122
| 0.706476
| false
|
isaacyeaton/global-dyn-non-equil-gliding
|
Code/squirrel.py
|
1
|
13697
|
from __future__ import division
import numpy as np
from scipy import interpolate
import pandas as pd
def load_run(run_num, df):
"""Load in trial data.
Parameters
----------
run_num : int
Which run to load.
df : DataFrame
The DataFrame loaded from the original excel file.
Returns
-------
pos : array
(x, z) positions
tvec : array
Time vector for the run.
dt : float
Sampling interval between data points.
"""
# sampling rate
# http://rsif.royalsocietypublishing.org/content/10/80/20120794/suppl/DC1
if run_num <= 7:
dt = 1 / 60.
else:
dt = 1 / 125.
xkey = "'Caribou_Trial_{0:02d}_Xvalues'".format(run_num)
zkey = "'Caribou_Trial_{0:02d}_Zvalues'".format(run_num)
d = df[[xkey, zkey]]
d = np.array(d)
# get rid of nans and a bunch of junky zeros starting at row 301
start_bad = np.where(np.isnan(d))[0]
if len(start_bad) > 0:
start_bad = start_bad[0]
d = d[:start_bad]
# get rid of zeros (if we get past rows 301...)
start_bad = np.where(d == 0.)[0]
if len(d) > 300 and len(start_bad) > 0:
start_bad = start_bad[0]
d = d[:start_bad]
tvec = np.arange(0, len(d)) * dt
return d, tvec, dt
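# Illustrative usage sketch (not part of the original module); the excel
# filename is hypothetical:
#   df = pd.read_excel('caribou_trials.xlsx')
#   pos, tvec, dt = load_run(3, df)
#   vel = calc_vel(pos, dt)
#   acc = calc_accel(vel, dt)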
def calc_vel(pos_data, dt):
"""Velocity in the x and z directions.
Parameters
----------
pos_data : array
(x, z) position information
dt : float
Sampling rate
Returns
-------
vel : array
(vx, vz)
"""
vx = np.gradient(pos_data[:, 0], dt)
vy = np.gradient(pos_data[:, 1], dt)
return np.c_[vx, vy]
def calc_accel(vel_data, dt):
"""Acceleration in the x and z directions.
Parameters
----------
vel_data : array
(vx, vz) velocity data
dt : float
Sampling rate
Returns
-------
accel : array
(ax, az)
"""
ax = np.gradient(vel_data[:, 0], dt)
ay = np.gradient(vel_data[:, 1], dt)
return np.c_[ax, ay]
def calc_vel_mag(vel_data):
"""Velocity magnitude.
Parameters
----------
vel_data : array
(vx, vz) velocity data
Returns
-------
vel_mag : array
np.sqrt(vx**2 + vz**2)
"""
return np.sqrt(vel_data[:, 0]**2 + vel_data[:, 1]**2)
def calc_gamma(vel_data):
"""Glide angle.
Parameters
----------
vel_data : array
(vx, vz)
Returns
-------
gamma : array
Glide angle in rad
"""
return -np.arctan2(vel_data[:, 1], vel_data[:, 0])
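# Worked example (illustrative, not part of the original module): for level
# flight (vx = 1, vz = 0), gamma = -arctan2(0, 1) = 0; for a 45 degree
# descent (vx = 1, vz = -1), gamma = -arctan2(-1, 1) = pi/4, so the glide
# angle is positive when the animal is descending.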
def splfit_all(data, tvec, k=5, s=.5):
"""Fit a spline to the data.
"""
posx = interpolate.UnivariateSpline(tvec, data[:, 0], k=k, s=s)
posz = interpolate.UnivariateSpline(tvec, data[:, 1], k=k, s=s)
velx = posx.derivative(1)
velz = posz.derivative(1)
accx = posx.derivative(2)
accz = posz.derivative(2)
pos = np.c_[posx(tvec), posz(tvec)]
vel = np.c_[velx(tvec), velz(tvec)]
acc = np.c_[accx(tvec), accz(tvec)]
return pos, vel, acc
def polyfit(data, tvec, intfun):
"""Fit a spline to the data.
"""
posx = intfun(tvec, data[:, 0])
posz = intfun(tvec, data[:, 1])
velx = posx.derivative(1)
velz = posz.derivative(1)
accx = posx.derivative(2)
accz = posz.derivative(2)
pos = np.c_[posx(tvec), posz(tvec)]
vel = np.c_[velx(tvec), velz(tvec)]
acc = np.c_[accx(tvec), accz(tvec)]
return pos, vel, acc
def polyfit_all(data, tvec, deg, wn=0):
"""Fit a spline to the data.
TODO: this does not to the mirroring correctly!
"""
start = data[:wn][::-1]
stop = data[-wn:][::-1]
datanew = np.r_[start, data, stop]
tvecnew = np.r_[tvec[:wn][::-1], tvec, tvec[-wn:][::-1]]
posx = np.polyfit(tvecnew, datanew[:, 0], deg)
posz = np.polyfit(tvecnew, datanew[:, 1], deg)
velx = np.polyder(posx, 1)
velz = np.polyder(posz, 1)
accx = np.polyder(posx, 2)
accz = np.polyder(posz, 2)
pos = np.c_[np.polyval(posx, tvec), np.polyval(posz, tvec)]
vel = np.c_[np.polyval(velx, tvec), np.polyval(velz, tvec)]
acc = np.c_[np.polyval(accx, tvec), np.polyval(accz, tvec)]
return pos, vel, acc
def fill_df(pos, vel, acc, gamma, velmag, tvec, i):
"""Put one trial's data into a DataFrame.
Parameters
----------
pos : (n x 2) array
x and z position data
vel : (n x 2) array
x and z velocity data
acc : (n x 2) array
x and z acceleration data
gamma : (n x 1) array
Glide angles in deg
velmag : (n x 1) array
Velocity magnitude
tvec : (n x 1) array
Time points
i : int
Trial number that becomes the column name
Returns
-------
posx, posz, velx, velz, accx, accz, gamm, vmag : DataFrame
Data in a DataFrame
"""
posx = pd.DataFrame(pos[:, 0], index=tvec, columns=[str(i)])
posz = pd.DataFrame(pos[:, 1], index=tvec, columns=[str(i)])
velx = pd.DataFrame(vel[:, 0], index=tvec, columns=[str(i)])
velz = pd.DataFrame(vel[:, 1], index=tvec, columns=[str(i)])
accx = pd.DataFrame(acc[:, 0], index=tvec, columns=[str(i)])
accz = pd.DataFrame(acc[:, 1], index=tvec, columns=[str(i)])
gamm = pd.DataFrame(gamma, index=tvec, columns=[str(i)])
vmag = pd.DataFrame(velmag, index=tvec, columns=[str(i)])
return posx, posz, velx, velz, accx, accz, gamm, vmag
def window_bounds(i, n, wn):
"""Start and stop indices for a moving window.
Parameters
----------
i : int
Current index
n : int
Total number of points
wn : int, odd
Total window size
Returns
-------
start : int
Start index
stop : int
Stop index
at_end : bool
Whether we are truncating the window
"""
at_end = False
hw = wn // 2
start = i - hw
stop = i + hw + 1
if start < 0:
at_end = True
start = 0
elif stop > n:
at_end = True
stop = n
return start, stop, at_end
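# Worked example (illustrative, not part of the original module): with
# n = 10 points and a window of wn = 5 (so hw = 2),
#   window_bounds(0, 10, 5)  # -> (0, 3, True)    truncated at the start
#   window_bounds(5, 10, 5)  # -> (3, 8, False)   full window
#   window_bounds(9, 10, 5)  # -> (7, 10, True)   truncated at the end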
def moving_window_pts(data, tvec, wn, deg=2, drop_deg=False):
"""Perform moving window smoothing.
Parameters
----------
data : (n x 2) array
Data to smooth and take derivatives of
tvec : (n x 1) array
Time vector
wn : int, odd
Total window size
deg : int, default=2
Polynomial degree to fit to data
drop_deg : bool, default=False
        Whether to drop the degree of the interpolating polynomial at the
        ends of the time series, since the truncated window can
        negatively affect the fit.
Returns
-------
spos : (n x 2) array
x and z smoothed data
svel : (n x 2) array
First derivatives of smoothed data (velocity)
sacc : (n x 2) array
Second derivatives of smoothed data (acceleration)
"""
deg_orig = deg
posx, posz = data.T
npts = len(posx)
spos = np.zeros((npts, 2))
svel = np.zeros((npts, 2))
sacc = np.zeros((npts, 2))
for i in range(npts):
start, stop, at_end = window_bounds(i, npts, wn)
if at_end and drop_deg:
deg = deg_orig - 1
else:
deg = deg_orig
t = tvec[start:stop]
x = posx[start:stop]
z = posz[start:stop]
pfpx = np.polyfit(t, x, deg)
pfpz = np.polyfit(t, z, deg)
pfvx = np.polyder(pfpx, m=1)
pfvz = np.polyder(pfpz, m=1)
pfax = np.polyder(pfpx, m=2)
pfaz = np.polyder(pfpz, m=2)
tval = tvec[i]
spos[i] = np.polyval(pfpx, tval), np.polyval(pfpz, tval)
svel[i] = np.polyval(pfvx, tval), np.polyval(pfvz, tval)
sacc[i] = np.polyval(pfax, tval), np.polyval(pfaz, tval)
return spos, svel, sacc
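# Illustrative usage sketch (not part of the original module), assuming
# `pos` and `tvec` come from load_run above:
#   spos, svel, sacc = moving_window_pts(pos, tvec, wn=11, deg=2,
#                                        drop_deg=True)
# An 11-point quadratic fit yields smoothed position plus velocity and
# acceleration at every sample; drop_deg=True drops to a linear fit in the
# truncated windows at the ends of the series.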
def moving_window_pos(data, tvec, wn, deg=2):
"""Do a moving window of +/- wn, where wn is position.
"""
xwn = wn
hxwn = xwn / 2
posx, posz = data.T
npts = len(posx)
spos = np.zeros((npts, 2))
svel = np.zeros((npts, 2))
sacc = np.zeros((npts, 2))
for i in range(npts):
ind = np.where((posx >= posx[i] - hxwn) & (posx <= posx[i] + hxwn))[0]
t = tvec[ind]
x = posx[ind]
z = posz[ind]
pfpx = np.polyfit(t, x, deg)
pfpz = np.polyfit(t, z, deg)
pfvx = np.polyder(pfpx, m=1)
pfvz = np.polyder(pfpz, m=1)
pfax = np.polyder(pfpx, m=2)
pfaz = np.polyder(pfpz, m=2)
tval = tvec[i]
spos[i] = np.polyval(pfpx, tval), np.polyval(pfpz, tval)
svel[i] = np.polyval(pfvx, tval), np.polyval(pfvz, tval)
sacc[i] = np.polyval(pfax, tval), np.polyval(pfaz, tval)
return spos, svel, sacc
def moving_window_spl(data, tvec, wn, s=.5):
"""Do a moving window of +/- wn on the data and
    take derivatives.
"""
posx, posz = data.T
npts = len(posx)
spos = np.zeros((npts, 2))
svel = np.zeros((npts, 2))
sacc = np.zeros((npts, 2))
for i in range(npts):
start, stop, at_end = window_bounds(i, npts, wn)
t = tvec[start:stop]
x = posx[start:stop]
z = posz[start:stop]
px = interpolate.UnivariateSpline(t, x, k=5, s=s)
pz = interpolate.UnivariateSpline(t, z, k=5, s=s)
vx = px.derivative(1)
vz = pz.derivative(1)
ax = px.derivative(2)
az = pz.derivative(2)
tval = tvec[i]
spos[i] = px(tval), pz(tval)
svel[i] = vx(tval), vz(tval)
sacc[i] = ax(tval), az(tval)
return spos, svel, sacc
def svfilter(tvec, data, wn, order, mode='interp'):
"""Use a Savitzky-Golay to smooth position data and to
calculate the derivatives.
This blog post has a modification of this, which might have better
high frequency filtering: http://bit.ly/1wjZKvk
"""
from scipy.signal import savgol_filter
x, z = data.T
dt = np.diff(tvec).mean()
px = savgol_filter(x, wn, order, mode=mode)
pz = savgol_filter(z, wn, order, mode=mode)
vx = savgol_filter(x, wn, order, mode=mode, deriv=1, delta=dt)
vz = savgol_filter(z, wn, order, mode=mode, deriv=1, delta=dt)
ax = savgol_filter(x, wn, order, mode=mode, deriv=2, delta=dt)
az = savgol_filter(z, wn, order, mode=mode, deriv=2, delta=dt)
return np.c_[px, pz], np.c_[vx, vz], np.c_[ax, az]
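# Illustrative usage sketch (not part of the original module): for
# scipy.signal.savgol_filter the window length must be odd and greater
# than the polynomial order, e.g.
#   pos_s, vel_s, acc_s = svfilter(tvec, pos, wn=11, order=3)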
def clcd_binning(gl_bins, gl_rad, Cl, Cd):
"""Bin the lift and drag coefficient curves against glide angle
to get average across all trajectories
Parameters
----------
gl_bins : array
The different bins [left, right)
gl_rad : DataFrame
Glide angle data in radians
Cl : DataFrame
Lift coefficients
Cd : DataFrame
Drag coefficients
Returns
-------
clcd_means : array, (n x 3)
lift-to-drag ratio mean, std, stderror
cl_means : array, (n x 3)
same for lift coefficient
cd_means : array, (n x 3)
same for drag coefficient
gl_means : array, (n x 3)
same for glide angle
Notes
-----
This uses a Taylor expansion for the Cl/Cd ratio statistics,
    because the plain ratio of means is a biased estimator.
"""
nbins = len(gl_bins)
gl_flattened = gl_rad.values.flatten()
cl_flattened = Cl.values.flatten()
cd_flattened = Cd.values.flatten()
bins = np.digitize(gl_flattened, gl_bins)
all_indices = []
no_data = []
cl_means = np.zeros((nbins, 3))
cd_means = np.zeros((nbins, 3))
clcd_means = np.zeros((nbins, 3))
gl_means = np.zeros((nbins, 3))
for idx in np.arange(nbins):
# find relevent indices
all_indices.append(np.where(bins == idx)[0])
indices = np.where(bins == idx)[0]
if len(indices) == 0:
no_data.append(idx)
continue
# get out our data
glsnip = gl_flattened[indices]
clsnip = cl_flattened[indices]
cdsnip = cd_flattened[indices]
clcd_means[idx] = taylor_moments(clsnip, cdsnip)
cl_means[idx] = simple_moments(clsnip)
cd_means[idx] = simple_moments(cdsnip)
gl_means[idx] = simple_moments(glsnip)
# remove where we have no interpolation
# clcd_means[no_data] = np.nan
# cl_means[no_data] = np.nan
# cd_means[no_data] = np.nan
# gl_means[no_data] = np.nan
return clcd_means[1:], cl_means[1:], cd_means[1:], gl_means[1:]
def taylor_moments(x, y):
"""Taylor series approximations to the moments of a ratio.
See http://bit.ly/1uy8qND and http://bit.ly/VHPX4u
and http://en.wikipedia.org/wiki/Ratio_estimator
Parameters
----------
x : 1D array
Numerator of the ratio
y : 1D array
Denomenator of the ratio
Returns
-------
tmean : float
Mean of the ratio
tstd : float
STD of the ratio
tserr : float
Standard error of the ratio
"""
n = len(x)
ex = x.mean()
ey = y.mean()
varx = x.var()
vary = y.var()
cov = np.cov(x, y)[0, 1]
tmean = ex / ey - cov / ey**2 + ex / ey**3 * vary
tvar = varx / ey**2 - 2 * ex / ey**3 * cov + ex**2 / ey**4 * vary
tstd = np.sqrt(tvar)
return tmean, tstd, tstd / np.sqrt(n)
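# Note (illustrative, not part of the original module): the second-order
# Taylor expansions computed above are
#   E[x/y]   ~ E[x]/E[y] - cov(x, y)/E[y]**2 + E[x]*var(y)/E[y]**3
#   var(x/y) ~ var(x)/E[y]**2 - 2*E[x]*cov(x, y)/E[y]**3
#              + E[x]**2*var(y)/E[y]**4
# which match the tmean/tvar lines; the plain mean of x/y would be biased
# for noisy denominators.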
def simple_moments(x):
"""Moments for Cl and Cd curves.
Parameters
----------
x : 1D numpy array
Returns
-------
mean, std, sterr
"""
mean = x.mean()
std = x.std()
sterr = std / np.sqrt(len(x))
return mean, std, sterr
def interpspl(data, npts, k=3, s=3):
"""Interpolate using splines.
"""
tck, u = interpolate.splprep(data.T, k=k, s=s, nest=-1)
datanew = interpolate.splev(np.linspace(0, 1, npts), tck)
return np.array(datanew).T
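# Illustrative usage sketch (not part of the original module): resample a
# trajectory to 200 evenly spaced spline-parameter values:
#   pos200 = interpspl(pos, 200, k=3, s=3)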
|
mit
| -4,676,687,735,099,849,000
| 23.458929
| 78
| 0.557494
| false
|
EUDAT-B2STAGE/http-api-base
|
old_stuff/rapydo/services/celery/worker.py
|
1
|
1165
|
# -*- coding: utf-8 -*-
"""
Celery pattern. Some interesting read here:
http://blog.miguelgrinberg.com/post/celery-and-the-flask-application-factory-pattern
Of course that discussion alone is not enough for
a Flask templating framework like ours,
so we made some improvements throughout the code.
"""
from rapydo.server import create_app
from rapydo.services.celery.celery import celery_app
from rapydo.utils.meta import Meta
from rapydo.utils.logs import get_logger
log = get_logger(__name__)
################################################
# Reload Flask app code also for the worker
# This is necessary to have the app context available
app = create_app(worker_mode=True, debug=True)
# Recover celery app with current app
# celery_app = MyCelery(app)._current
log.debug("Celery %s" % celery_app)
################################################
# Import task modules to make sure all tasks are available
meta = Meta()
main_package = "commons.tasks."
# Base tasks
submodules = meta.import_submodules_from_package(main_package + "base")
# Custom tasks
submodules = meta.import_submodules_from_package(main_package + "custom")
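# Illustrative sketch (not part of the original module): with the app context
# created above, a worker is typically started from the command line, e.g.
#   celery -A rapydo.services.celery.worker worker --loglevel=info
# (the exact invocation depends on the Celery version and the deployment;
# shown here only as an assumption of how this module is used).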
|
mit
| 2,851,370,823,037,356,000
| 28.125
| 84
| 0.693562
| false
|
Oppium/BTCeGUI
|
BTCeGUI.py
|
1
|
21599
|
#! python3
import tkinter
import tkinter.ttk as ttk
import threading
import operator
import time
import copy
import os.path
import datetime
import queue
import BTCe
api = BTCe.API('BTCe.ini')
console = None
def format_float(value):
return ('{:0.8f}'.format(float(value)).rstrip('0').rstrip('.'))
def validate_float(value):
if not value:
return True
try:
v = float(value)
return True
except ValueError:
return False
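# Illustrative examples (not part of the original script):
#   format_float(0.10000000)  # -> '0.1'   trailing zeros stripped
#   format_float(42)          # -> '42'    trailing '.' stripped as well
#   validate_float('')        # -> True    an empty entry is allowed
#   validate_float('1.5e-3')  # -> True
#   validate_float('abc')     # -> False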
class CurrencyBox(ttk.Combobox):
"""Currency pair selection combo box."""
def __init__(self, parent):
ttk.Combobox.__init__(self, parent, state='readonly', justify='left', width=12)
self.set('Currency Pair')
def update(self, pairs):
"""Update available pairs."""
if not pairs:
return
values = [pair.upper().replace('_', '/') for pair in pairs]
values.sort()
self.config(values=values)
class TradeFrame(ttk.Frame):
"""Buy/sell box."""
def __init__(self, parent, type):
"""type: Buy | Sell"""
ttk.Frame.__init__(self, parent, borderwidth=10, relief='groove')
self.type = type
self.funds = {}
self.fee = 0
self.allchecked = tkinter.IntVar()
self.focus = 0
self.currvars = [tkinter.StringVar(value='0') for i in range(2)]
self.ratevar = tkinter.StringVar(value='0')
self.feevar = tkinter.StringVar(value='0')
self.ignoretrace = False
# init widgets
validatecommand = (self.register(validate_float), '%P')
self.currentries = [ttk.Entry(self, justify='right', validate='key', validatecommand=validatecommand, textvariable=self.currvars[i]) for i in range(2)]
self.currlabels = [ttk.Label(self, text='') for i in range(2)]
self.rateentry = ttk.Entry(self, justify='right', validate='key', validatecommand=validatecommand, textvariable=self.ratevar)
self.feeentry = ttk.Entry(self, justify='right', state='readonly', validate='key', validatecommand=validatecommand, textvariable=self.feevar)
self.feelabel = ttk.Label(self, text='')
self.orderbutton = ttk.Button(self, text='Place Order', state='disabled', command=self.placeorder)
# frame layout
ttk.Label(self, text=type).grid(column=0, row=0, sticky='w')
ttk.Label(self, text='Amount:').grid(column=0, row=1, sticky='w')
self.currentries[0].grid(column=1, row=1, sticky='nsew')
self.currlabels[0].grid(column=2, row=1, sticky='w')
ttk.Label(self, text='Value:').grid(column=0, row=2, sticky='w')
self.currentries[1].grid(column=1, row=2, sticky='nsew')
self.currlabels[1].grid(column=2, row=2, sticky='w')
ttk.Label(self, text='Rate:').grid(column=0, row=3, sticky='w')
self.rateentry.grid(column=1, row=3, sticky='nsew')
ttk.Label(self, text='Fee:').grid(column=0, row=4, sticky='w')
self.feelabel.grid(column=2, row=4, sticky='w')
self.feeentry.grid(column=1, row=4, sticky='nsew')
ttk.Checkbutton(self, text='All', variable=self.allchecked, command=self.update_amounts).grid(column=1, row=5, sticky='nw')
self.orderbutton.grid(column=1, row=5, sticky='ne')
self.grid_columnconfigure(0, weight=0, minsize=50)
self.grid_columnconfigure(1, weight=1)
self.grid_columnconfigure(2, weight=0, minsize=50)
self.grid_rowconfigure(0, weight=0)
self.grid_rowconfigure(1, weight=1, pad=5)
self.grid_rowconfigure(2, weight=1, pad=5)
self.grid_rowconfigure(3, weight=1, pad=5)
self.grid_rowconfigure(4, weight=1, pad=5)
self.grid_rowconfigure(5, weight=1)
# events
self.ratevar.trace('w', self.update_amounts)
for i in range(2):
self.currvars[i].trace('w', lambda name, index, op, focus=i: self.setfocus(focus))
def setfocus(self, focus, *args):
"""Change focus due to currency entry edit (using trace)."""
if not self.ignoretrace:
self.focus = focus
self.update_amounts(args)
def setrate(self, rate):
self.ratevar.set(format_float(rate))
def placeorder(self):
self.orderbutton.config(state='disabled', text='Placing Order...')
# get all trade data from current entries and labels
pair = '_'.join(self.currlabels[i].cget('text') for i in range(2)).lower()
type = self.type.lower()
rate = float(self.rateentry.get())
amount = float(self.currentries[0].get())
threading.Thread(target=self.master.placeorder, args=[pair, type, rate, amount]).start()
def update(self, pair, funds, fee, cantrade, ordering):
"""Update currency labels and amounts."""
if len(pair) == 2:
for i in range(2):
self.currlabels[i].config(text=pair[i])
self.feelabel.config(text=(pair[0] if self.type == 'Buy' else pair[1]))
# enable/disable order button
amount = self.currvars[0].get()
amount = float(0.0 if amount == '' else amount)
rate = self.ratevar.get()
rate = float(0.0 if rate == '' else rate)
if cantrade and len(pair) == 2 and amount > 0.0 and rate > 0.0 and not ordering:
self.orderbutton.config(state='normal', text='Place Order')
elif ordering:
self.orderbutton.config(state='disabled', text='Placing Order...')
else:
self.orderbutton.config(state='disabled', text='Place Order')
self.funds = funds
self.fee = float(fee) / 100.0
self.update_amounts()
def update_amounts(self, *args):
"""Update currency amounts."""
self.ignoretrace = True
# auto-fill focus in case of a checked All button
pair = [self.currlabels[i].cget('text') for i in range(2)]
if self.funds and self.allchecked.get() and pair[0] and pair[1]:
self.focus = 1 if self.type == 'Buy' else 0
balance = self.funds[pair[self.focus].lower()]
self.currvars[self.focus].set(format_float(balance))
# calculate non-focused entry
rate = self.ratevar.get()
rate = float(0.0 if rate == '' else rate)
op = operator.mul if self.focus == 0 else operator.truediv
nonfocus = 1 - self.focus
focus = self.currvars[self.focus].get()
focus = float(focus) if focus else 0.0
self.currvars[nonfocus].set(format_float(op(focus, rate) if rate != 0.0 else 0.0))
# calculate fee
feedval = self.currvars[0].get() if self.type == 'Buy' else self.currvars[1].get()
feedval = float(feedval) if feedval else 0.0
self.feevar.set(format_float(self.fee * feedval))
# (re)set readonly/normal entry states
state = 'readonly' if self.allchecked.get() else 'normal'
for currentry in self.currentries:
currentry.config(state=state)
self.ignoretrace = False
class ConsoleFrame(ttk.Frame):
"""Console."""
def __init__(self, parent):
ttk.Frame.__init__(self, parent, borderwidth=10, relief='groove')
self.queue = queue.Queue()
# init widgets
self.text = tkinter.Text(self, height=4, state='disabled')
vsb = ttk.Scrollbar(self, orient='vertical', command=self.text.yview)
self.text.config(yscrollcommand=vsb.set)
# frame layout
ttk.Label(self, text='Console').grid(column=0, row=0, sticky='w', columnspan=2)
self.text.grid(column=0, row=1, sticky='nsew')
vsb.grid(column=1, row=1, sticky='nse')
self.grid_columnconfigure(0, weight=1)
self.grid_columnconfigure(1, weight=0)
self.grid_rowconfigure(0, weight=0)
self.grid_rowconfigure(1, weight=1, pad=5)
def print(self, text):
self.queue.put(text)
def update(self):
atend = self.text.yview()[1] == 1.0
self.text.config(state='normal')
while not self.queue.empty():
self.text.insert('end', '{}: {}\n'.format(datetime.datetime.now().strftime('%H:%M:%S'), self.queue.get()))
self.text.config(state='disabled')
if atend:
self.text.see('end')
class Console:
def print(self, text):
print(text)
class OrderFrame(ttk.Frame):
"""Frame for showing open orders."""
status = ['Active', 'Filled', 'Partially Filled', 'Cancelled']
def __init__(self, parent):
ttk.Frame.__init__(self, parent, borderwidth=10, relief='groove')
# init widgets
self.table = ttk.Treeview(self, columns=['id', 'time', 'pair', 'type', 'rate', 'amount', 'value', 'status'], show='headings', height=3)
vsb = ttk.Scrollbar(self, orient='vertical', command=self.table.yview)
self.table.config(yscrollcommand=vsb.set)
self.orderbutton = ttk.Button(self, text='Cancel Order(s)', state='disabled', command=self.cancelorders)
# frame layout
ttk.Label(self, text='Open Orders').grid(column=0, row=0, sticky='w')
self.table.grid(column=0, row=1, sticky='nsew')
vsb.grid(column=1, row=1, sticky='ns')
self.orderbutton.grid(column=0, row=2, sticky='nse')
self.grid_columnconfigure(0, weight=1, pad=5)
self.grid_columnconfigure(1, weight=0, pad=5)
self.grid_rowconfigure(0, weight=0)
self.grid_rowconfigure(1, weight=1, pad=5)
# table layout
self.table.heading('id', text='ID', anchor='w')
self.table.heading('time', text='Placed on', anchor='w')
self.table.heading('pair', text='Pair', anchor='w')
self.table.heading('type', text='Type', anchor='w')
self.table.heading('rate', text='Rate', anchor='w')
self.table.heading('amount', text='Amount', anchor='w')
self.table.heading('value', text='Value', anchor='w')
self.table.heading('status', text='Status', anchor='w')
self.table.column('id', width=15)
self.table.column('time', width=60)
self.table.column('pair', width=10)
self.table.column('type', width=20)
self.table.column('rate', width=30)
self.table.column('amount', width=60)
self.table.column('value', width=60)
self.table.column('status', width=40)
def cancelorders(self):
"""Cancel all selected orders."""
self.orderbutton.config(state='disabled', text='Cancelling...')
selects = self.table.selection()
selectids = []
for select in selects:
selectids.append(int(self.table.item(select)['values'][0]))
threading.Thread(target=self.master.cancelorders, args=[selectids]).start()
def update(self, orders, cantrade, cancelling):
"""Build order list and update table."""
# enable/disable order button
if cantrade and orders and not cancelling:
self.orderbutton.config(state='normal', text='Cancel Order(s)')
elif cancelling:
self.orderbutton.config(state='disabled', text='Cancelling...')
else:
self.orderbutton.config(state='disabled', text='Cancel Order(s)')
# store old selection keys
selects = self.table.selection()
selectids = []
for select in selects:
selectids.append(int(self.table.item(select)['values'][0]))
# delete old entries
self.table.delete(*self.table.get_children())
if not orders:
return
# insert new entries and select old keys
for id in orders:
order = orders[id]
time = datetime.datetime.utcfromtimestamp(order['timestamp_created'])
pair = order['pair'].upper().split('_')
rate = float(order['rate'])
amount = float(order['amount'])
value = format_float(rate * amount) + ' ' + pair[1]
amount = format_float(amount) + ' ' + pair[0]
status = OrderFrame.status[order['status']]
values = [id, time, '/'.join(pair), order['type'].capitalize(), rate, amount, value, status]
item = self.table.insert('', 'end', values=values)
if int(id) in selectids:
self.table.selection_add(item)
class DepthFrame(ttk.Frame):
"""Treeview and components for a list of offers."""
def __init__(self, parent, type):
"""type: Ask | Bid"""
ttk.Frame.__init__(self, parent, borderwidth=10, relief='groove')
self.type = type
# init widgets
self.table = ttk.Treeview(self, columns=['rate', 'curr0', 'curr1'], show='headings')
vsb = ttk.Scrollbar(self, orient='vertical', command=self.table.yview)
self.table.configure(yscrollcommand = vsb.set)
# frame layout
ttk.Label(self, text=type).grid(column=0, row=0, sticky='w')
self.table.grid(column=0, row=1, sticky='nsew')
vsb.grid(column=1, row=1, sticky='ns')
self.grid_columnconfigure(0, weight=1, pad=5)
self.grid_columnconfigure(1, weight=0, pad=5)
self.grid_rowconfigure(0, weight=0)
self.grid_rowconfigure(1, weight=1, pad=5)
# table layout
self.table.column('rate', width=60)
self.table.column('curr0', width=80)
self.table.column('curr1', width=80)
def update(self, depth, pair):
"""Clear and rebuild the depth table."""
if not depth or len(pair) != 2:
return
# update headings
self.table.heading('rate', text='Rate', anchor='w')
self.table.heading('curr0', text=pair[0], anchor='w')
self.table.heading('curr1', text=pair[1], anchor='w')
# store old selection keys
selects = self.table.selection()
selectrates = []
for select in selects:
selectrates.append(float(self.table.item(select)['values'][0]))
# delete old entries
self.table.delete(*self.table.get_children())
# insert new entries and select old keys
orders = depth[self.type.lower() + 's']
for order in orders:
values = [float(order[0]), float(order[1]), format_float(float(order[0]) * float(order[1]))]
item = self.table.insert('', 'end', values=values)
if values[0] in selectrates:
self.table.selection_add(item)
class BalanceFrame(ttk.Frame):
"""Tree view for personal balances."""
def __init__(self, parent):
ttk.Frame.__init__(self, parent, borderwidth=10, relief='groove')
# init widgets
self.table = ttk.Treeview(self, columns = ['curr', 'funds'], show='headings')
vsb = ttk.Scrollbar(self, orient='vertical', command=self.table.yview)
self.table.configure(yscrollcommand = vsb.set)
# frame layout
ttk.Label(self, text='Funds').grid(column=0, row=0, columnspan=2, sticky='w')
self.table.grid(column=0, row=1, sticky='nsew')
vsb.grid(column=1, row=1, sticky='ns')
self.grid_columnconfigure(0, weight=1)
self.grid_rowconfigure(0, weight=0)
self.grid_rowconfigure(1, weight=1)
# table layout
self.table.column('curr', width=60)
self.table.column('funds', width=100)
self.table.heading('curr', text='Currency', anchor='w')
self.table.heading('funds', text='Balance', anchor='w')
def update(self, funds):
"""Clear and rebuild the balance table."""
if not funds:
return
# store old selection keys
selects = self.table.selection()
selectcurrs = []
for select in selects:
selectcurrs.append(self.table.item(select)['values'][0])
# delete old entries
for entry in self.table.get_children():
self.table.delete(entry)
# insert new sorted entries and select old keys
funds = list(funds.items())
funds.sort()
for fund in funds:
curr = fund[0].upper()
item = self.table.insert('', 'end', values=[curr, format_float(fund[1])])
if curr in selectcurrs:
self.table.selection_add(item)
class Main(tkinter.Tk):
"""Main frame."""
def __init__(self):
tkinter.Tk.__init__(self)
self.title('BTCeGUI')
self.lockdata = threading.Lock()
self.locknonce = threading.Lock()
self.info = {}
self.depth = {}
self.userinfo = {}
        self.orders = {}
self.pair = {}
self.run = True
self.buying = False
self.selling = False
self.cancelling = False
# layout
self.geometry('800x800+100+100')
self.currencybox = CurrencyBox(self)
self.currencybox.grid(column=0, row=0, stick='nw')
self.buybox = TradeFrame(self, 'Buy')
self.buybox.grid(column=0, row=1, sticky='nsew', padx=20, pady=5)
self.sellbox = TradeFrame(self, 'Sell')
self.sellbox.grid(column=1, row=1, sticky='nsew', padx=20, pady=5)
self.askframe = DepthFrame(self, 'Ask')
self.askframe.grid(column=0, row=2, sticky='nsew', padx=5, pady=5)
self.bidframe = DepthFrame(self, 'Bid')
self.bidframe.grid(column=1, row=2, sticky='nsew', padx=5, pady=5)
self.balanceframe = BalanceFrame(self)
self.balanceframe.grid(column=2, row=2, sticky='nsew', padx=5, pady=5)
self.orderframe = OrderFrame(self)
self.orderframe.grid(column=0, row=3, sticky='nsew', padx=5, pady=5, columnspan=3)
self.console = ConsoleFrame(self)
self.console.grid(column=0, row=4, sticky='nsew', padx=5, pady=5, columnspan=3)
global console
console = self.console
self.grid_columnconfigure(0, weight=1)
self.grid_columnconfigure(1, weight=1)
self.grid_columnconfigure(2, weight=0)
self.grid_rowconfigure(0, weight=0)
self.grid_rowconfigure(1, weight=0)
self.grid_rowconfigure(2, weight=1)
self.grid_rowconfigure(3, weight=0)
self.grid_rowconfigure(4, weight=0)
# events
self.askframe.table.bind('<Double-1>', lambda event: self.ondouble_depth(self.askframe.table, self.buybox, event))
self.bidframe.table.bind('<Double-1>', lambda event: self.ondouble_depth(self.bidframe.table, self.sellbox, event))
# api threads
if api.secret == b'copy API secret here' or api.key == b'copy API key here':
console.print('No API secret/key found. Only public data available.')
else:
threading.Thread(target=self.update_userinfo_loop).start()
threading.Thread(target=self.update_orders_loop).start()
threading.Thread(target=self.update_depth_loop).start()
threading.Thread(target=self.update_info_loop).start()
self.sync()
def exit(self):
"""Stop running threads."""
self.run = False
# redirect console prints to the normal console
global console
console = Console()
def ondouble_depth(self, table, box, event):
"""Send double-clicked rate to trade box."""
item = table.identify('item', event.x, event.y)
if (item):
box.setrate(table.item(item, 'values')[0])
def sync(self):
"""Sync GUI to states."""
self.lockdata.acquire()
userinfo = copy.copy(self.userinfo)
orders = copy.copy(self.orders)
info = copy.copy(self.info)
depth = copy.copy(self.depth)
self.pair = copy.copy(self.currencybox.get().split('/'))
self.lockdata.release()
pairs = None
if info:
pairs = info.get('pairs')
self.currencybox.update(pairs)
funds = None
if userinfo:
funds = userinfo.get('funds')
# update depth tables
fee = 0
pair = []
if (depth):
pair = next(iter(depth))
if pairs:
fee = pairs[pair]['fee']
depth = depth[pair]
pair = pair.upper().split('_')
cantrade = True if userinfo and userinfo['rights']['trade'] == 1 else False
self.askframe.update(depth, pair)
self.bidframe.update(depth, pair)
self.balanceframe.update(funds)
self.buybox.update(pair, funds, fee, cantrade, self.buying)
self.sellbox.update(pair, funds, fee, cantrade, self.selling)
self.orderframe.update(orders, cantrade, self.cancelling)
self.console.update()
self.after(100, self.sync)
def update_depth_loop(self):
while self.run:
self.update_depth()
time.sleep(1.0)
def update_depth(self):
# if currency pair is valid get depth table
self.lockdata.acquire()
pair = copy.copy(self.pair)
self.lockdata.release()
depth = {}
if len(pair) == 2:
depth = BTCe.API.depth('_'.join(pair).lower())
if depth and 'success' in depth.keys():
if depth['success'] == 1:
depth = depth['return']
else:
console.print('[WARNING] Error requesting depth: {}'.format(depth['error']))
depth = None
self.lockdata.acquire()
self.depth = depth
self.lockdata.release()
def update_userinfo_loop(self):
acc = 0.0
while self.run:
self.update_userinfo()
while acc < 5.0 and self.run:
time.sleep(0.5)
acc += 0.5
acc = 0.0
def update_userinfo(self):
self.locknonce.acquire()
userinfo = api.getinfo()
self.locknonce.release()
if userinfo and 'success' in userinfo.keys():
if userinfo['success'] == 1:
userinfo = userinfo['return']
else:
console.print('[WARNING] Error requesting user info: {}'.format(userinfo['error']))
userinfo = None
self.lockdata.acquire()
self.userinfo = userinfo
self.lockdata.release()
def update_orders_loop(self):
acc = 0.0
while self.run:
self.update_orders()
while acc < 10.0 and self.run:
time.sleep(0.5)
acc += 0.5
acc = 0.0
def update_orders(self):
self.locknonce.acquire()
orders = api.activeorders()
self.locknonce.release()
if orders and 'success' in orders.keys():
if orders['success'] == 1:
orders = orders['return']
else:
if orders['error'] != 'no orders':
console.print('[WARNING] Error requesting open orders: {}'.format(orders['error']))
orders = None
self.lockdata.acquire()
self.orders = orders
self.lockdata.release()
    def update_info_loop(self):
        acc = 0.0
        while self.run:
            self.update_info()
            while acc < 30.0 and self.run:
                time.sleep(0.5)
                acc += 0.5
            acc = 0.0
    def update_info(self):
        info = BTCe.API.info()
        if info and 'success' in info.keys():
            if info['success'] == 1:
                info = info['return']
            else:
                console.print('[WARNING] Error requesting public info: {}'.format(info['error']))
                info = None
        self.lockdata.acquire()
        self.info = info
        self.lockdata.release()
def placeorder(self, pair, type, rate, amount):
console.print('Placing order {}.'.format([pair, type, rate, amount]))
if type == 'buy':
self.buying = True
elif type == 'sell':
self.selling = True
else:
return
self.locknonce.acquire()
response = api.trade(pair, type, rate, amount)
self.locknonce.release()
if response and 'success' in response.keys():
if response['success'] == 1:
console.print('Order placed successfully.')
else:
console.print('[WARNING] Error placing order: {}'.format(response['error']))
self.update_orders()
self.update_userinfo()
if type == 'buy':
self.buying = False
elif type == 'sell':
self.selling = False
def cancelorders(self, ids):
self.cancelling = True
for id in ids:
console.print('Cancel order {}.'.format(id))
self.locknonce.acquire()
response = api.cancelorder(id)
self.locknonce.release()
if response and 'success' in response.keys():
if response['success'] == 1:
console.print('Order cancelled successfully.')
else:
console.print('[WARNING] Error cancelling order: {}'.format(response['error']))
self.update_orders()
self.update_userinfo()
self.cancelling = False
root = Main()
root.mainloop()
root.exit()
|
mit
| -531,146,083,927,897,600
| 31.53012
| 153
| 0.678504
| false
|
brigittebigi/proceed
|
proceed/src/term/textprogress.py
|
1
|
5853
|
#!/usr/bin/env python2
# -*- coding: UTF-8 -*-
# ---------------------------------------------------------------------------
# ___ __ __ __ ___
# / | \ | \ | \ / Automatic
# \__ |__/ |__/ |___| \__ Annotation
# \ | | | | \ of
# ___/ | | | | ___/ Speech
# =============================
#
# http://sldr.org/sldr000800/preview/
#
# ---------------------------------------------------------------------------
# developed at:
#
# Laboratoire Parole et Langage
#
# Copyright (C) 2011-2015 Brigitte Bigi
#
# Use of this software is governed by the GPL, v3
# This banner notice must not be removed
# ---------------------------------------------------------------------------
#
# SPPAS is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SPPAS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SPPAS. If not, see <http://www.gnu.org/licenses/>.
#
# ---------------------------------------------------------------------------
# File: textprogress.py
# ----------------------------------------------------------------------------
__docformat__ = """epytext"""
__authors__ = """Brigitte Bigi (brigitte.bigi@gmail.com)"""
__copyright__ = """Copyright (C) 2011-2015 Brigitte Bigi"""
# ----------------------------------------------------------------------------
import sys
import re
import math
from terminalcontroller import TerminalController
# ----------------------------------------------------------------------------
# Constants
# ----------------------------------------------------------------------------
WIDTH = 74
BAR = '%3d%% ${GREEN}[${BOLD}%s%s${NORMAL}${GREEN}]${NORMAL}\n'
HEADER = '${BOLD}${CYAN}%s${NORMAL}\n\n'
# ----------------------------------------------------------------------------
class TextProgress:
"""
@authors: Brigitte Bigi
@contact: brigitte.bigi((AATT))lpl-aix.fr
@license: GPL
    @summary: A 3-line progress bar.
It looks like::
Header
20% [===========----------------------------------]
progress message
    The progress bar is colored if the terminal supports color
    output, and adjusts to the width of the terminal.
"""
def __init__(self):
"""
Constructor.
"""
try:
self.term = TerminalController()
except:
self.term = None
        if self.term and not (self.term.CLEAR_EOL and self.term.UP and self.term.BOL):
self.term = None
self.bar = BAR
if self.term:
self.bar = self.term.render(BAR)
        self.cleared = 1 #: true if we haven't drawn the bar yet.
self.percent = 0
self.text = ""
# End __init__
# ------------------------------------------------------------------
def update(self, percent, message):
"""
Update the progress.
        @param percent: progress bar value, a fraction between 0 and 1
        @param message: progress bar message text
"""
n = int((WIDTH-10)*percent)
if self.term:
sys.stdout.write(
self.term.BOL + self.term.UP + self.term.CLEAR_EOL +
(self.bar % (100*percent, '='*n, '-'*(WIDTH-10-n))) +
self.term.CLEAR_EOL + message.center(WIDTH))
else:
sys.stdout.write( ' => ' + message + " \n")
self.percent = percent
self.text = message
# End update
# ------------------------------------------------------------------
def clear(self):
"""
Clear.
"""
if not self.cleared:
if self.term:
sys.stdout.write(self.term.BOL + self.term.CLEAR_EOL +
self.term.UP + self.term.CLEAR_EOL +
self.term.UP + self.term.CLEAR_EOL)
else:
sys.stdout.write('\n'*50)
self.cleared = 1
# End clear
# ------------------------------------------------------------------
def set_fraction(self, percent):
"""
Set a new progress value.
        @param percent: new progress value (a fraction between 0 and 1)
"""
self.update(percent,self.text)
# End set_fraction
# ------------------------------------------------------------------
def set_text(self,text):
"""
Set a new progress text.
@param text: new progress text
"""
self.update(self.percent,text)
# End set_text
# ------------------------------------------------------------------
def set_header(self,header):
"""
        Set a new progress header.
        @param header: new progress header
"""
if self.term:
self.header = self.term.render(HEADER % header.center(WIDTH))
else:
self.header = " " + header
sys.stdout.write(self.header)
# End set_header
# ------------------------------------------------------------------
def set_new(self):
"""
Initialize a new progress line.
"""
sys.stdout.write('\n')
self.clear()
self.text = ""
self.percent = 0
# End set_new
# ------------------------------------------------------------------
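# ----------------------------------------------------------------------------
# Illustrative usage (a minimal sketch, not part of the original module; the
# colored rendering assumes a capable terminal, and update() takes a fraction
# between 0 and 1):
#
#     p = TextProgress()
#     p.set_header("Processing")
#     p.set_new()
#     for i in range(101):
#         p.update(i / 100.0, "step %d of 100" % i)
# ----------------------------------------------------------------------------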
|
gpl-3.0
| -6,311,342,072,019,640,000
| 27.832512
| 78
| 0.411584
| false
|
kichkasch/ioids
|
g4ds/g4dslogging.py
|
1
|
5157
|
"""
Provides Logging facilities.
Grid for Digital Security (G4DS)
Currently, simple logging into files.
@author: Michael Pilgermann
@contact: mailto:mpilgerm@glam.ac.uk
@license: GPL (General Public License)
"""
from time import strftime
import string
import syslog
# "singleton"
_defaultLogger = None
def getDefaultLogger():
"""
Singleton implementation.
"""
global _defaultLogger
if not _defaultLogger:
_defaultLogger = FileLogger()
return _defaultLogger
LOGSERVER_STATUS = 0 # boot up / shutdown of logging
COMMUNICATION_INCOMING_ERROR = 100
COMMUNICATION_INCOMING_NO_ENDPOINT = 101
COMMUNICATION_INCOMING_MSG = 199
COMMUNICATION_INCOMING_MSG_DETAILS = 198
COMMUNICATION_OUTGOING_ERROR = 200
COMMUNICATION_OUTGOING_NO_ENDPOINT = 201
COMMUNICATION_OUTGOING_ERROR_SERVICE = 202
COMMUNICATION_OUTGOING_MSG = 299
COMMUNICATION_OUTGOING_MSG_CTRL = 298
COMMUNICATION_OUTGOING_MSG_ROUTED = 297
COMMUNICATION_OUTGOING_MSG_DETAILS = 296
COMMUNICATION_OUTGOING_MSG_SERVICE_DETAILS = 295
CONTROL_SYSTEM_DETAILS = 399
CONTROL_SYSTEM_ERROR = 300
SERVICES_NEW_INCOMING = 999
SERVICES_CLIENT_CONNECT = 998
SERVICES_CLIENT_DISCONNECT = 997
SERVICES_CLIENT_SENDINGERROR = 900
ROUTING_MESSAGE_PASSED = 899
ROUTING_TABLE_UPDATED = 799
ROUTING_TABLE_UPDATED_MANUALLY = 798
ROUTING_TABLE_UPDATED_PUHSHED = 797
ROUTING_TABLE_UPDATED_ERROR = 700
PERMISSION_MATRIX_RECALCULATED = 699
PERMISSION_MESSAGE_PASSED = 698
PERMISSION_MESSAGE_DROPPED = 601
PERMISSION_POLICY_ERROR = 602
CLASS={}
CLASS[0] = [LOGSERVER_STATUS, COMMUNICATION_INCOMING_ERROR, COMMUNICATION_OUTGOING_ERROR]
CLASS[1] = [COMMUNICATION_INCOMING_NO_ENDPOINT, COMMUNICATION_OUTGOING_NO_ENDPOINT, PERMISSION_MESSAGE_DROPPED]
CLASS[1].extend(CLASS[0])
CLASS[2] = [ROUTING_TABLE_UPDATED, PERMISSION_MATRIX_RECALCULATED, CONTROL_SYSTEM_DETAILS]
CLASS[2].extend(CLASS[1])
CLASS[3] = [PERMISSION_MESSAGE_PASSED]
CLASS[3].extend(CLASS[2])
CLASS[4] = []
CLASS[4].extend(CLASS[3])
CLASS[5] = [] #everything - not used
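# e.g. with LOGGING_LEVEL 1, only the categories collected in CLASS[1] (which
# includes everything from CLASS[0]) are written; level 5 bypasses the class
# lookup and logs every category (see FileLogger.newMessage below).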
class FileLogger:
"""
All messages are equipped with a timestamp and line wise written to a log file.
    Additionally, this class supports logging into syslog facilities.
@ivar _logfile: Reference to the file instance
@type _logfile: C{File}
@ivar _level: Log level to be used for the instance (defined in config file)
@type _level: C{int}
"""
def __init__(self):
"""
Open the log file.
        Put a log message in the log file for bringing up the g4ds log service.
"""
import config
from config import LOGGING_FILENAME, LOGGING_LEVEL, ENABLE_SYSLOG, SYSLOG_IDENTIFIER
self._logfile = open(LOGGING_FILENAME, 'a')
self._level = LOGGING_LEVEL
self._syslogOn = ENABLE_SYSLOG
if ENABLE_SYSLOG:
syslog.openlog(SYSLOG_IDENTIFIER)
self.newMessage(LOGSERVER_STATUS, 'G4DS Logging started (level %d)' %(self._level))
def closedown(self):
"""
Shutdown logging.
Put a log message in the log file for closing down g4ds logging and finally close the log file.
"""
self.newMessage(LOGSERVER_STATUS, 'G4DS Logging shut down')
self._logfile.close()
if self._syslogOn:
syslog.closelog()
def newMessage(self, category, message):
"""
New entry for the log system.
        A check is performed whether the given category is to be logged at the active log level. If so,
        a message is generated, made up of a time stamp, the category value and the message itself.
"""
try:
if self._level != 5:
CLASS[self._level].index(category)
st = strftime('%Y-%m-%d %H:%M:%S').ljust(17) + ' ' + string.zfill(category, 3) + ' ' + str(message) + '\n'
self._logfile.write(st)
self._logfile.flush()
if self._syslogOn:
syslog.syslog(string.zfill(category, 3) + ' ' + str(message))
        except (KeyError, ValueError):
pass # this log message is not in the class for the given log level - just ignore it
def getLatestMessages(self, n):
"""
Returns the last 'n' lines of the log file.
@param n: Number of lines requested
@type n: C{int}
@return: The last lines - each line as a string - together in a list
@rtype: C{List} of C{String}
"""
from config import LOGGING_FILENAME, LOGGING_LEVEL
logfile = open(LOGGING_FILENAME, 'r')
lines = []
s = logfile.readline().rstrip()
i = -1
while s != '':
i = (i+1) % n
if len(lines) > i:
lines[i] = s
else:
lines.append(s)
s = logfile.readline().rstrip()
logfile.close()
if len(lines) == i+1:
return lines
# put it in order
back = []
while len(back) < n:
i = (i+1) % n
back.append(lines[i])
return back
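# Illustrative usage (a minimal sketch, not part of the original module; it
# assumes the 'config' module imported by FileLogger provides
# LOGGING_FILENAME, LOGGING_LEVEL, ENABLE_SYSLOG and SYSLOG_IDENTIFIER):
#
#     logger = getDefaultLogger()
#     logger.newMessage(COMMUNICATION_INCOMING_MSG, 'message received')
#     for line in logger.getLatestMessages(10):
#         print line
#     logger.closedown()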
|
gpl-3.0
| -4,223,083,735,552,558,000
| 29.696429
| 118
| 0.627303
| false
|
renqianluo/DLT2T
|
DLT2T/models/transformer_test.py
|
1
|
3968
|
# coding=utf-8
# Copyright 2017 The DLT2T Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Transformer."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
from DLT2T.data_generators import problem_hparams
from DLT2T.models import transformer
import tensorflow as tf
BATCH_SIZE = 3
INPUT_LENGTH = 5
TARGET_LENGTH = 7
VOCAB_SIZE = 10
class TransformerTest(tf.test.TestCase):
def getModel(self, hparams, mode=tf.estimator.ModeKeys.TRAIN):
hparams.hidden_size = 8
hparams.filter_size = 32
hparams.num_heads = 1
hparams.layer_prepostprocess_dropout = 0.0
p_hparams = problem_hparams.test_problem_hparams(VOCAB_SIZE, VOCAB_SIZE)
hparams.problems = [p_hparams]
inputs = -1 + np.random.random_integers(
VOCAB_SIZE, size=(BATCH_SIZE, INPUT_LENGTH, 1, 1))
targets = -1 + np.random.random_integers(
VOCAB_SIZE, size=(BATCH_SIZE, TARGET_LENGTH, 1, 1))
features = {
"inputs": tf.constant(inputs, dtype=tf.int32),
"targets": tf.constant(targets, dtype=tf.int32),
"target_space_id": tf.constant(1, dtype=tf.int32),
}
    return transformer.Transformer(
        hparams, mode, p_hparams), features
def testTransformer(self):
model, features = self.getModel(transformer.transformer_small())
    sharded_logits, _ = model.model_fn(features)
    logits = tf.concat(sharded_logits, 0)
with self.test_session() as session:
session.run(tf.global_variables_initializer())
res = session.run(logits)
self.assertEqual(res.shape, (BATCH_SIZE, TARGET_LENGTH, 1, 1, VOCAB_SIZE))
def testTransformerRelative(self):
model, features = self.getModel(transformer.transformer_relative_tiny())
    sharded_logits, _ = model.model_fn(features)
    logits = tf.concat(sharded_logits, 0)
with self.test_session() as session:
session.run(tf.global_variables_initializer())
res = session.run(logits)
self.assertEqual(res.shape, (BATCH_SIZE, TARGET_LENGTH, 1, 1, VOCAB_SIZE))
def testGreedyVsFast(self):
model, features = self.getModel(transformer.transformer_small())
decode_length = 2
out_logits, _ = model.model_fn(features)
out_logits = tf.squeeze(out_logits[0], axis=[2, 3])
loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=tf.reshape(out_logits, [-1, VOCAB_SIZE]),
labels=tf.reshape(features["targets"], [-1]))
loss = tf.reduce_mean(loss)
apply_grad = tf.train.AdamOptimizer(0.001).minimize(loss)
with self.test_session():
tf.global_variables_initializer().run()
for _ in range(100):
apply_grad.run()
model, _ = self.getModel(transformer.transformer_small(),
mode=tf.estimator.ModeKeys.PREDICT)
with tf.variable_scope(tf.get_variable_scope(), reuse=True):
greedy_result, _, _ = model._slow_greedy_infer(
features, decode_length, last_position_only=True)
greedy_result = tf.squeeze(greedy_result, axis=[2, 3])
fast_result, _, _ = model._greedy_infer(features, decode_length)
with self.test_session():
greedy_res = greedy_result.eval()
fast_res = fast_result.eval()
self.assertEqual(fast_res.shape, (BATCH_SIZE, INPUT_LENGTH + decode_length))
self.assertAllClose(greedy_res, fast_res)
if __name__ == "__main__":
tf.test.main()
|
apache-2.0
| -5,892,039,420,365,958,000
| 33.206897
| 80
| 0.685988
| false
|
gyepisam/kedpm
|
kedpm/frontends/cli.py
|
1
|
32456
|
# Copyright (C) 2003-2005 Andrey Lebedev <andrey@micro.lt>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id: cli.py,v 1.50 2011/03/21 00:27:36 anarcat Exp $
"Command line interface for Ked Password Manager"
from kedpm import __version__
from kedpm.plugins.pdb_figaro import PDBFigaro, FigaroPassword, FigaroPasswordTooLongError
from kedpm.passdb import DatabaseNotExist
from kedpm.exceptions import WrongPassword, RenameError
from kedpm.frontends.frontend import Frontend
from kedpm.config import OptionError
from kedpm import password
from kedpm import parser
from getpass import getpass
from cmd import Cmd
import os, sys, tempfile
from os.path import expanduser
import readline
import ctypes
import subprocess
import webbrowser
import shlex
class Application (Cmd, Frontend):
PS1 = "kedpm:%s> " # prompt template
cwd = [] # Current working directory
intro = _("""Ked Password Manager is ready for operation.
try 'help' for brief description of available commands.
""")
modified = 0
histfile = os.path.join(expanduser('~'), '.kedpm', 'history')
# console supports escape sequences?
use_console_escapes = False
def __init__(self):
Cmd.__init__(self)
if (hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()):
if hasattr(ctypes, 'windll'):
# skip windows for now, but see
# http://www.burgaud.com/bring-colors-to-the-windows-console-with-python/
# with a possible solution here: http://pypi.python.org/pypi/colorama
pass
else:
self.use_console_escapes = True
if self.use_console_escapes:
self.PS1 = "\x1b[1m"+self.PS1+"\x1b[0m" # bold prompt
def printMessage(self, message, *vars):
if self.conf.options['verbose']:
            print message % vars
def openDatabase(self):
        ''' Open database and prompt for password if necessary '''
self.pdb = PDBFigaro(filename = expanduser(self.conf.options['fpm-database']))
password = ""
self.printMessage(_("Ked Password Manager (version %s)"), __version__)
while 1:
try:
self.pdb.open(password)
break
except WrongPassword:
if password:
print _("Error! Wrong password.")
else:
print _("Provide password to access the database (Ctrl-C to exit)")
password = getpass(_("Password: "))
except DatabaseNotExist:
password = self.createNewDatabase()
self.printMessage(_("Password accepted."))
print
def createNewDatabase(self):
'Create new password database and return password for created database'
print _("Creating new password database.")
pass1 = pass2 = ""
while pass1 != pass2 or pass1 == "":
pass1 = getpass(_("Provide password: "))
pass2 = getpass(_("Repeat password: "))
if pass1 == '':
print _("Empty passwords are really insecure. You should " \
"create one.")
if pass1!=pass2:
print _("Passwords don't match! Please repeat.")
self.pdb.create(pass1)
return pass1
def updatePrompt(self):
self.prompt = self.PS1 % ('/'+'/'.join(self.cwd))
def getCwd(self):
'Return current working password tree instance'
return self.pdb.getTree().getTreeFromPath(self.cwd)
def listPasswords(self, passwords, show_numbers=0):
'''display given passwords in nicely formed table'''
# we assume that all passwords in list have the same fields
# the same as in first one
if not passwords:
return
prot = passwords[0]
lengths = show_numbers and {'nr': 3} or {}
headers = show_numbers and ['Nr'] or []
fstr = show_numbers and "%%%ds " or ""
listable = prot.getFieldsOfType([password.TYPE_STRING])
# determine maximum space needed by each column
for fld in listable:
ftitle = prot.getFieldTitle(fld)
lengths[fld] = len(ftitle)
fstr = fstr + "%%%ds "
headers.append(ftitle)
ptuples = []
num = 1
for pwd in passwords:
ptup = []
if show_numbers:
ptup.append("%d)" %num)
for fld in listable:
ptup.append(getattr(pwd, fld))
newlen = len(getattr(pwd, fld))
if newlen > lengths[fld]:
lengths[fld] = newlen
ptuples.append(tuple(ptup))
num = num + 1
# form format string
if show_numbers:
listable = ['nr'] + listable
fstr = fstr % tuple([lengths[x]+1 for x in listable])
print fstr % tuple(headers)
print fstr % tuple(["="*lengths[x]for x in listable])
for ptup in ptuples:
print fstr % ptup
def filterPasswords(self, regexp, tree = None):
'''Returns a list of passwords, filtered by REGEXP'''
if tree is None:
tree = self.getCwd()
return(tree.locate(regexp))
def getPasswords(self, regexp, tree = None):
'''Returns a list of passwords, filtered by REGEXP.
Calls pickPassword if program has been configured to force
single selection'''
if self.conf.options['force-single']:
return [self.pickPassword(regexp, tree)]
else:
return(self.filterPasswords(regexp, tree))
def pickPassword(self, regexp, tree = None):
        '''Prompt user to pick one password from the given password tree. The
        tree, provided by the "tree" argument, is filtered using "regexp".
        If the resulting list contains only one password, return it without
        prompting. If no passwords were located, or the user decides to cancel
        the operation, return None.'''
passwords = self.filterPasswords(regexp, tree)
if not passwords:
self.printMessage(_("No passwords matching \"%s\" were found"), regexp)
return None
if len(passwords) > 1:
self.listPasswords(passwords, 1)
print _("Enter number. Enter 0 to cancel.")
try:
showstr = raw_input(_('show: '))
except (KeyboardInterrupt, EOFError):
# user has cancelled selection
showstr = "0"
try:
shownr = int(showstr)
if not shownr:
return None
selected_password = passwords[shownr-1]
except (ValueError, IndexError):
return None
else:
selected_password = passwords[0]
return selected_password
def inputString(self, prompt):
'''Input string from user'''
input = raw_input(prompt)
return input
def inputText(self, prompt):
'''Input long text from user'''
return self.inputString(prompt)
def inputPassword(self, prompt):
        '''Prompt the user for a password, entered twice for confirmation'''
pwd = None
while pwd is None:
pass1 = getpass(prompt)
if pass1=='':
return ''
pass2 = getpass('Repeat: ')
if pass1==pass2:
pwd = pass1
else:
print _("Passwords don't match. Try again.")
return pwd
def editPassword(self, pwd):
'''Prompt user for each field of the password. Respect fields\' type.'''
input = {}
for field, fieldinfo in pwd.fields_type_info:
field_type = fieldinfo['type']
new_value = ""
if field_type == password.TYPE_STRING:
new_value = self.inputString(_("Enter %s (\"%s\"): ") % (pwd.getFieldTitle(field), pwd[field]))
elif field_type == password.TYPE_TEXT:
new_value = self.inputText(_("Enter %s (\"%s\"): ") % (pwd.getFieldTitle(field), pwd[field]))
elif field_type == password.TYPE_PASSWORD:
new_value = self.inputPassword(_("Enter %s: ") % pwd.getFieldTitle(field))
else:
print _("Error. Type %s is unsupported yet. " \
"This field will retain an old value.") % field_type
if new_value!="":
input[field] = new_value
try:
pwd.update(input)
except FigaroPasswordTooLongError:
print _("WARNING! Your password is too long for Figaro Password Manager.")
print _("Figaro Password Manager can handle only passwords shorter than 24 characters.")
print _("""However, KedPM can store this password for you, but it
will break fpm compatibility. fpm will not be able to handle such a
long password correctly.""")
answer = raw_input(_("Do you still want to save your password? [Y/n]: "))
if answer.lower().startswith('n'):
raise KeyboardInterrupt
pwd.store_long_password = 1
pwd.update(input)
#return pwd
def tryToSave(self):
self.modified = 1
savemode = self.conf.options["save-mode"]
if savemode == 'no':
return
answer = 'y'
if self.conf.options["save-mode"] == "ask":
answer = raw_input(_("Database was modified. Do you want to save it now? [Y/n]: "))
if answer=='' or answer.lower().startswith('y'):
self.do_save('')
def complete_dirs(self, text, line, begidx, endidx):
completing = line[:endidx].split(' ')[-1]
base = completing[:completing.rfind('/')+1]
abspath = self.getAbsolutePath(base)
dirs = self.pdb.getTree().getTreeFromPath(abspath).getBranches()
compl = []
for dir in dirs.keys():
if dir.startswith(text):
compl.append(dir+'/')
return compl
def getEditorInput(self, content=''):
"""Fire up an editor and read user input from temporary file"""
for name in ('VISUAL', 'EDITOR'):
editor = os.environ.get(name)
if editor:
break
else:
if editor is None:
editor = 'vi'
self.printMessage(_("running %s"), editor)
# create temporary file
handle, tmpfname = tempfile.mkstemp(prefix="kedpm_")
tmpfile = open(tmpfname, 'w')
tmpfile.write(content)
tmpfile.close()
os.system(editor + " " + tmpfname)
tmpfile = open(tmpfname, 'r')
text = tmpfile.read()
tmpfile.close()
os.remove(tmpfname)
return text
def getAbsolutePath(self, arg):
"""Return absolute path from potentially relative (cat)"""
root = self.pdb.getTree()
if not arg:
return self.cwd
if(arg[0] == '/'):
path = root.normalizePath(arg.split('/'))
else:
path = root.normalizePath(self.cwd + arg.split('/'))
return path
def getTreeFromRelativePath(self, path):
"""Get tree object from given relative path and current working
directory"""
root = self.pdb.getTree()
abspath = self.getAbsolutePath(path)
return root.getTreeFromPath(abspath)
def shiftArgv(self, argv, count = 1):
if len(argv) > count:
arg = " ".join(argv[count:])
else:
arg = ""
return arg
def printRecord(self, record):
obscure_passwords = self.conf.options['obscure-passwords']
for key, fieldinfo in record.fields_type_info:
if record[key] == '':
continue
if fieldinfo['type'] == password.TYPE_PASSWORD and obscure_passwords and self.use_console_escapes:
print "%s: \x1b[31m\x1b[41m%s\x1b[00m" % (fieldinfo['title'], record[key])
else:
print "%s: %s" % (fieldinfo['title'], record[key])
##########################################
# Command implementations below. #
def emptyline(self):
pass
def do_exit(self, arg):
'''Quit KED Password Manager'''
readline.write_history_file(self.histfile)
if self.modified:
self.tryToSave()
self.printMessage(_("Exiting."))
sys.exit(0)
def do_EOF(self, arg):
'''The same as 'exit' command'''
print
self.do_exit(arg)
def do_quit(self, arg):
'''The same as 'exit' command'''
print
self.do_exit(arg)
def do_ls(self, arg):
'''List available catalogs and passwords
Syntax:
ls [<category>]
'''
try:
tree = self.getTreeFromRelativePath(arg)
except KeyError:
print _("ls: %s: No such catalog") % arg
return
print _("=== Directories ===")
for bname in tree.getBranches().keys():
print bname+"/"
print _("==== Passwords ====")
self.listPasswords(tree.getNodes())
def complete_ls(self, text, line, begidx, endidx):
return self.complete_dirs(text, line, begidx, endidx)
def do_cd(self, arg):
'''change directory (category)
Syntax:
cd <category>
'''
root = self.pdb.getTree()
cdpath = self.getAbsolutePath(arg)
try:
newpath = root.getTreeFromPath(cdpath)
except KeyError:
print _("cd: %s: No such catalog") % arg
else:
self.cwd = cdpath
self.updatePrompt()
def complete_cd(self, text, line, begidx, endidx):
return self.complete_dirs(text, line, begidx, endidx)
def do_pwd(self, arg):
'''print name of current/working directory'''
print '/'+'/'.join(self.cwd)
def do_show(self, arg):
'''display password information.
Syntax:
show [-r] <regexp>
        -r - recursive search: search the whole subtree for matching passwords
    This will display the contents of a password item in the current category, or the
    whole subtree if the -r flag was specified. If several items are matched by <regexp>
    and the program has been configured to prompt for a single entry, a list of them
    will be printed and you will be prompted to enter a number pointing to the
    password you want to look at. After receiving that number, KedPM will show you
    the password. Otherwise all matching entries will be displayed.'''
argv = arg.split()
tree = None
if argv and argv[0] == '-r':
tree = self.getCwd().flatten()
arg = self.shiftArgv(argv)
selected_passwords = self.getPasswords(arg, tree)
for record in selected_passwords:
if record:
print "---------------------------------------"
self.printRecord(record)
print "---------------------------------------"
def do_edit(self, arg):
'''edit password information.
Syntax:
edit [-p] <regexp>
Prompts the user to edit a password item in the current category.
If <regexp> matches multiple items, the list of matches will be printed
        and the user is prompted to select one.
If the optional '-p' flag is specified or the 'force-editor' config option is set,
the password will be edited using the editor specified in the VISUAL or EDITOR
environment variables, defaulting to "vi" if neither is found.
Otherwise the user is prompted to edit each entry of the password entry on the command line.
'''
argv = arg.split()
if argv and argv[0] == '-p':
use_editor = True
arg = self.shiftArgv(argv)
elif self.conf.options['force-editor']:
use_editor = True
else:
use_editor = False
selected_password = self.pickPassword(arg)
if selected_password:
try:
if use_editor:
text = self.getEditorInput(selected_password.asEditText())
patterns = [selected_password.getEditPattern()]
chosendict = parser.parseMessage(text, patterns)
selected_password.update(chosendict)
else:
self.editPassword(selected_password)
self.tryToSave()
except (KeyboardInterrupt, EOFError):
self.printMessage(_("Cancelled"))
else:
self.printMessage(_("No password selected"))
def do_new(self, arg):
'''Add new password to current category. You will be prompted to enter
fields.
Syntax:
new [-p | -t]
-p - Get properties by parsing provided text. Will open default text editor
for you to paste text in. Mutually exclusive with -t option.
-t - Display editor template in default text editor. Mutually exclusive with -p option.
    If the config option 'force-editor' is set, this command defaults to the -t option when no options are provided.
'''
new_pass = FigaroPassword() # FIXME: Password type shouldn't be hardcoded.
argv = arg.split()
use_visual_editor = len(argv) == 0 and self.conf.options["force-editor"]
if "-p" in argv and "-t" in argv:
print _("new: -p and -t arguments are mutually exclusive.")
print _("try 'help new' for more information")
elif "-p" in argv:
text = self.getEditorInput()
choosendict = parser.parseMessage(text, self.conf.patterns)
new_pass.update(choosendict)
elif "-t" in argv or use_visual_editor:
text = self.getEditorInput(new_pass.asEditText())
choosendict = parser.parseMessage(text, [new_pass.getEditPattern()])
new_pass.update(choosendict)
try:
if not use_visual_editor:
self.editPassword(new_pass)
except (KeyboardInterrupt, EOFError):
self.printMessage(_("Cancelled"))
else:
tree = self.getCwd()
tree.addNode(new_pass)
self.tryToSave()
def do_import(self, arg):
'''Imports new password records into current category.
Syntax:
import
Get properties by parsing provided text. Will open default text editor
for you to paste text in.
'''
argv = arg.split()
tree = self.getCwd()
text = self.getEditorInput()
for line in [x for x in text.splitlines() if x]:
new_pass = FigaroPassword() # FIXME: Password type shouldn't be hardcoded.
choosendict = parser.parseMessage(line, self.conf.patterns)
new_pass.update(choosendict)
tree.addNode(new_pass)
self.tryToSave()
def do_save(self, arg):
'''Save current password tree'''
sys.stdout.write(_("Saving..."))
sys.stdout.flush()
self.pdb.save()
print "OK"
self.modified = 0
def do_mkdir(self, arg):
'''create new category (directory)
Syntax:
mkdir <category>
Creates new password category in current one.
'''
if not arg:
print _("mkdir: too few arguments")
print _("try 'help mkdir' for more information")
return
pwd = self.getCwd()
pwd.addBranch(arg.strip())
def do_rename(self, arg):
'''rename category
Syntax:
rename <category> <new_name>
'''
args = arg.split()
if len(args) != 2:
print '''Syntax:
rename <category> <new_name>
'''
return
oldname = args[0]
newname = args[1]
try:
self.pdb.getTree().renameBranch(self.cwd+[oldname], newname)
except RenameError:
print _("rename: category %s already exists") % newname
return
except KeyError:
print _("rename: %s: no such category") % oldname
return
self.tryToSave()
def complete_rename(self, text, line, begidx, endidx):
return self.complete_dirs(text, line, begidx, endidx)
def do_passwd(self, arg):
"""Change master password for opened database
        Syntax:
            passwd [new password]
        If the new password is not provided with the command, you will be prompted
        to enter a new one.
"""
if not arg:
# Password is not provided with command. Ask user for it
pass1 = getpass(_("New password: "))
pass2 = getpass(_("Repeat password: "))
if pass1 == '':
print _("Empty passwords are really insecure. You should " \
"create one.")
return
if pass1!=pass2:
print _("Passwords don't match! Please repeat.")
return
new_pass = pass1
else:
new_pass = arg
self.pdb.changePassword(new_pass)
self.printMessage(_("Password changed."))
def do_help(self, arg):
"""Print help topic"""
argv = arg.split()
if argv and argv[0] in ['set']:
# Provide extended help
help_def = getattr(self, "help_"+argv[0])
if help_def:
help_def(' '.join(argv[1:]))
else:
Cmd.do_help(self, arg)
else:
Cmd.do_help(self, arg)
def do_set(self, arg):
"""Set KedPM options
Syntax:
set -- show all options
set <option> -- show value of option
set <option> = <value> -- set value to option
For boolean values:
1, 'on' or 'true' are synonyms for True and
0, 'off' or 'false' are synonyms for False.
Enter help set <option> for more info on a particular option.
"""
opts = self.conf.options
if not arg:
# show all options
for opt, value in opts.items():
print "%s = %s" % (opt, value)
return
tokens = arg.split('=')
opt_name = tokens[0].strip()
try:
opt_value = opts[opt_name]
except KeyError:
print _("set: no such option: [%s]") % arg
return
if len(tokens) == 1:
# show value of option
print "%s = %s" % (opt_name, opt_value)
else:
# set the value
try:
opt_value = ' '.join(tokens[1:])
opts[opt_name] = opt_value.strip()
except OptionError, e:
print "set: %s" % e
# save configuration file
self.conf.save()
def complete_set(self, text, line, begidx, endidx):
compl = []
for opt in self.conf.options.keys():
if opt.startswith(text):
compl.append(opt)
return compl
def help_set(self, arg):
if not arg:
print self.do_set.__doc__
for name, option in self.conf.options.iteritems():
print option.get_doc(name)
print ""
return
try:
option = self.conf.options.getOption(arg)
print option.get_doc(arg)
except KeyError:
print _("set: no such option: %s") % arg
def do_rm(self, arg):
"""Remove password
Syntax:
rm <regexp>
Remove password from database. If several passwords match <regexp> and the
'force-single' option is enabled, you will be prompted to select one from the
list. Otherwise all matching records will be selected. If the
'confirm-deletes' option is enabled, you will be prompted to confirm the
deletion. Otherwise records will be deleted without confirmation."""
if not arg:
print "rm: you must specify a password to remove"
return
selected_password = self.pickPassword(arg)
if not selected_password:
self.printMessage(_("No password selected."))
return
Cwd = self.getCwd()
do_delete = False
if self.conf.options['confirm-deletes']:
print selected_password.asText()
answer = raw_input("Do you really want to delete this " \
"password (y/N)? ")
if answer.lower().startswith('y'):
do_delete = True
else:
self.printMessage(_("Password was not deleted."))
else:
do_delete = True
if do_delete:
# Do delete selected password
Cwd.removeNode(selected_password)
self.printMessage(_("Password deleted"))
self.tryToSave()
def do_mv(self, arg):
'''move a password
Syntax:
mv <regexp> <category>
Move a password record to a different category. If several records match, and the 'force-single' option
is enabled, you will be prompted to pick one. Otherwise all matching records will be moved.
'''
args = arg.split()
if len(args) != 2:
print '''Syntax:
mv <regexp> <category>
'''
return
pw = args[0]
cat = args[1]
# get destination category branch
root = self.pdb.getTree()
cat_path = self.getAbsolutePath(cat)
try:
dst_branch = root.getTreeFromPath(cat_path)
except KeyError:
print _("mv: %s: No such catalog") % cat
return
# select password from user
selected_passwords = self.getPasswords(pw)
if not selected_passwords:
self.printMessage(_("No password selected"))
return
Cwd = self.getCwd
for password in selected_passwords:
dst_branch.addNode(password)
Cwd().removeNode(password)
self.tryToSave()
def do_rmdir(self, arg):
'''Delete a category (directory)
Syntax:
rmdir <category>
        Deletes a password category and ALL its entries
'''
if not arg:
print _("rmdir: too few arguments")
print _("try 'help rmdir' for more information")
return
abspath = self.getAbsolutePath(arg)
if not abspath:
print _("rmdir: Can't remove root directory")
return
pwd = self.pdb.getTree().getTreeFromPath(abspath[:-1])
toremove = abspath[-1]
abspath_str = '/'+'/'.join(abspath)
#pwd = self.getCwd()
answer = raw_input(_("Are you sure you want to delete category %s'" \
" and ALL it's entries? [y/N]: ") % abspath_str)
if answer.lower().startswith('y'):
pwd.removeBranch(toremove)
print _("rmdir: category \"%s\" and all it's entries were deleted.") % abspath_str
self.tryToSave()
# Check if current directory still exists. If not - cd to root.
try:
cwd = self.getCwd()
except KeyError:
print _("rmdir: Warning! Current working directory was removed. " \
"Changing to /")
self.cwd = []
self.updatePrompt()
def complete_rmdir(self, text, line, begidx, endidx):
return self.complete_dirs(text, line, begidx, endidx)
def do_patterns(self, arg):
'''Edit parser patterns. Will open default text editor in order to
edit.'''
disclaimer = '''# Here you can teach Ked Password Manager how to
# extract useful password information from your emails. For more
# information on format of these patterns please refer to KedPM
# documentation.
#
# Basic rules are:
# {field} - matches password field;
# { } - matches arbitrary number of spaces or nothing;
# {~regexp} - matches arbitrary regular expression;
#
        # One line is one pattern. Empty lines and lines starting with the symbol "#"
        # will be ignored.
'''
pattern_text = '\n'.join(self.conf.patterns)
text = self.getEditorInput(disclaimer+pattern_text)
patterns = []
for line in [x for x in text.splitlines() if x]:
line = line.strip()
if line.startswith("#") or not line:
continue
patterns.append(line)
self.conf.patterns = patterns
self.conf.save()
def mainLoop(self):
if os.access(self.histfile, os.R_OK):
readline.read_history_file(self.histfile)
self.updatePrompt()
try:
self.cmdloop()
except KeyboardInterrupt:
self.do_exit("")
def showMessage(self, message):
print message
# this needs more error checking:
# * check if passwords match the regexp
# * check if the output file can be opened
# * check if the format given is valid
def do_export(self, arg):
'''Export many passwords.
Syntax:
            export [<regexp> [<file> [<format>]]]
<regexp> - match only those passwords
<file> - export to file
<format> - one of "plain" or "csv"
        This will export the contents of matching passwords under the current category; the search is recursive. If <file> is not specified, the passwords are printed on stdout. Note that <file> will be overwritten if it exists.
'''
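        # Example (illustrative):
        #   export bank passwords.csv csv
        # writes every password under the current category matching "bank"
        # to passwords.csv in CSV format.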
argv = arg.split()
# this represents all passwords
regexp = ""
# first argument: the regexp
if len(argv) > 0:
regexp = argv[0]
# second argument: the output file
if len(argv) > 1:
output = open(argv[1], 'w')
else:
output = sys.stdout
# print all found passwords to output stream
for path, record in self.getCwd().rlocate(regexp).iteritems():
if path == "./":
continue
# third argument: the output format
if (len(argv) > 2) and (argv[2] == "csv"):
output.write(record.asCSV())
else:
output.write("Path: %s\n" % path)
output.write(record.asText())
output.write("\n")
# close stream if necessary
if len(argv) > 1:
output.close()
def do_find(self, regexp):
'''Locate a password in the tree.
Syntax:
find [<regexp>]
List the paths with passwords matching the regexp.
TODO: Empty regexp leads to inconsistent results.
'''
for path, password in self.getCwd().rlocate(regexp).iteritems():
print path
def do_open(self, arg):
"""Open password URL in a web browser.
Syntax:
open <regexp>
Open the URL of a password item in the current category with the configured 'open-command'.
If <regexp> matches multiple items, the list of matches will be printed
        and the user is prompted to select one.
The selected entry is printed, then its URL is opened in a web browser.
If 'open-command' is set, it is invoked to open the password's URL.
If the command contains a '{}' placeholder, it is replaced with the URL.
Otherwise, the URL is appended to the command.
If the 'open-command' is not set, python's webbrowser module is used
to open the password URL.
"""
record = self.pickPassword(arg, self.getCwd().flatten())
if not record:
return
self.printRecord(record)
password_url = record['url']
if not password_url:
self.printMessage(_("URL field is empty"))
return
open_command = self.conf.options['open-command']
if open_command:
command = shlex.split(open_command)
try:
i = command.index('{}')
command[i] = password_url
except ValueError:
command.append(password_url)
subprocess.call(command)
else:
webbrowser.open(password_url)
|
gpl-2.0
| 603,016,947,405,821,000
| 32.017294
| 235
| 0.56991
| false
|
chrisjsewell/ipymd
|
ipymd/visualise/opengl/renderers/bond.py
|
1
|
3236
|
from .base import AbstractRenderer
from .line import LineRenderer
from .cylinder_imp import CylinderImpostorRenderer
import numpy as np
class BondRenderer(AbstractRenderer):
    '''
    Render chemical bonds as cylinders or lines.
    **Parameters**
    widget:
        The parent QChemlabWidget
    starts: np.ndarray((NBONDS, 3), dtype=float)
        Starting coordinate of each bond
    ends: np.ndarray((NBONDS, 3), dtype=float)
        Ending coordinate of each bond
    colors_start: np.ndarray((NBONDS, 4), dtype=uint8)
        Per-bond color at the starting half of each bond
    colors_end: np.ndarray((NBONDS, 4), dtype=uint8)
        Per-bond color at the ending half of each bond
    radii: np.ndarray((NBONDS,), dtype=float)
        The radius of each bond
    backend: "impostors" | "lines"
        Whether to render the bonds as cylinders (impostors) or lines.
    '''
def __init__(self, widget, starts, ends, colors_start, colors_end, radii,
backend="impostors", shading='phong', transparent=True,
linewidth=5):
#super(BondRenderer, self).__init__(widget)
self.radii = np.asarray(radii)
self.colors_start = np.array(colors_start, 'uint8')
self.colors_end = np.array(colors_end, 'uint8')
if backend == 'lines':
# need to duplicate color for start and end of the line
cols = np.empty((self.colors_start.shape[0],2,4),dtype='uint8')
cols[:,0,:] = self.colors_start
cols[:,1,:] = self.colors_end
            self.cr1 = LineRenderer(widget, np.array(list(zip(starts, ends))),
cols,width=linewidth)
self.cr2 = None
elif backend == 'impostors':
middles = (starts + ends)/2
bounds = np.empty((len(starts), 2, 3))
bounds[:, 0, :] = starts
bounds[:, 1, :] = middles
self.cr1 = CylinderImpostorRenderer(widget, bounds, self.radii,
self.colors_start, shading=shading,
transparent=transparent)
bounds = np.empty((len(starts), 2, 3))
bounds[:, 0, :] = middles
bounds[:, 1, :] = ends
self.cr2 = CylinderImpostorRenderer(widget, bounds, self.radii,
self.colors_end, shading=shading,
transparent=transparent)
else:
raise Exception("Available backends: lines, impostors")
def draw(self):
self.cr1.draw()
if self.cr2 is not None:
self.cr2.draw()
#TODO bond/cylinder update functions
def update_positions(self, bounds):
if bounds.size == 0:
return
self.cr1.update_bounds(bounds)
def update_colors(self, colors_start,colors_end):
self.colors_start = np.array(colors_start, 'uint8')
self.colors_end = np.array(colors_end, 'uint8')
self.cr1.update_colors(self.colors_start)
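# Illustrative usage (a minimal sketch, not part of the original module; it
# assumes a live QChemlabWidget with a current OpenGL context):
#
#     starts = np.array([[0.0, 0.0, 0.0]])
#     ends = np.array([[0.0, 0.0, 1.5]])
#     red = np.array([[255, 0, 0, 255]], dtype='uint8')
#     blue = np.array([[0, 0, 255, 255]], dtype='uint8')
#     renderer = BondRenderer(widget, starts, ends, red, blue,
#                             radii=[0.02], backend='impostors')
#     renderer.draw()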
|
gpl-3.0
| -4,377,023,312,838,159,400
| 37.070588
| 83
| 0.533993
| false
|
nomadicfm/pyramid-views
|
tests/_test_dates.py
|
1
|
31005
|
from __future__ import unicode_literals
import datetime
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase, override_settings, skipUnlessDBFeature
from django.test.utils import requires_tz_support
from django.utils import timezone
from .models import Book, BookSigning
def _make_books(n, base_date):
for i in range(n):
Book.objects.create(
name='Book %d' % i,
slug='book-%d' % i,
pages=100 + i,
pubdate=base_date - datetime.timedelta(days=i))
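# e.g. _make_books(3, datetime.date(2011, 12, 25)) creates three books dated
# Dec 25, Dec 24 and Dec 23, 2011.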
class ArchiveIndexViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'generic_views.urls'
def test_archive_view(self):
res = self.client.get('/dates/books/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), list(Book.objects.dates('pubdate', 'year', 'DESC')))
self.assertEqual(list(res.context['latest']), list(Book.objects.all()))
self.assertTemplateUsed(res, 'tests:templates/book_archive.html')
def test_archive_view_context_object_name(self):
res = self.client.get('/dates/books/context_object_name/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), list(Book.objects.dates('pubdate', 'year', 'DESC')))
self.assertEqual(list(res.context['thingies']), list(Book.objects.all()))
self.assertFalse('latest' in res.context)
self.assertTemplateUsed(res, 'tests:templates/book_archive.html')
def test_empty_archive_view(self):
Book.objects.all().delete()
res = self.client.get('/dates/books/')
self.assertEqual(res.status_code, 404)
def test_allow_empty_archive_view(self):
Book.objects.all().delete()
res = self.client.get('/dates/books/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), [])
self.assertTemplateUsed(res, 'tests:templates/book_archive.html')
def test_archive_view_template(self):
res = self.client.get('/dates/books/template_name/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), list(Book.objects.dates('pubdate', 'year', 'DESC')))
self.assertEqual(list(res.context['latest']), list(Book.objects.all()))
self.assertTemplateUsed(res, 'tests:templates/list.html')
def test_archive_view_template_suffix(self):
res = self.client.get('/dates/books/template_name_suffix/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), list(Book.objects.dates('pubdate', 'year', 'DESC')))
self.assertEqual(list(res.context['latest']), list(Book.objects.all()))
self.assertTemplateUsed(res, 'tests:templates/book_detail.html')
def test_archive_view_invalid(self):
self.assertRaises(ImproperlyConfigured, self.client.get, '/dates/books/invalid/')
def test_archive_view_by_month(self):
res = self.client.get('/dates/books/by_month/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), list(Book.objects.dates('pubdate', 'month', 'DESC')))
def test_paginated_archive_view(self):
_make_books(20, base_date=datetime.date.today())
res = self.client.get('/dates/books/paginated/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), list(Book.objects.dates('pubdate', 'year', 'DESC')))
self.assertEqual(list(res.context['latest']), list(Book.objects.all()[0:10]))
self.assertTemplateUsed(res, 'tests:templates/book_archive.html')
res = self.client.get('/dates/books/paginated/?page=2')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['page_obj'].number, 2)
self.assertEqual(list(res.context['latest']), list(Book.objects.all()[10:20]))
def test_paginated_archive_view_does_not_load_entire_table(self):
# Regression test for #18087
_make_books(20, base_date=datetime.date.today())
# 1 query for years list + 1 query for books
with self.assertNumQueries(2):
self.client.get('/dates/books/')
# same as above + 1 query to test if books exist + 1 query to count them
with self.assertNumQueries(4):
self.client.get('/dates/books/paginated/')
def test_no_duplicate_query(self):
# Regression test for #18354
with self.assertNumQueries(2):
self.client.get('/dates/books/reverse/')
def test_datetime_archive_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get('/dates/booksignings/')
self.assertEqual(res.status_code, 200)
@requires_tz_support
@skipUnlessDBFeature('has_zoneinfo_database')
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_archive_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/')
self.assertEqual(res.status_code, 200)
def test_date_list_order(self):
"""date_list should be sorted descending in index"""
_make_books(5, base_date=datetime.date(2011, 12, 25))
res = self.client.get('/dates/books/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), list(reversed(sorted(res.context['date_list']))))
class YearArchiveViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'generic_views.urls'
def test_year_view(self):
res = self.client.get('/dates/books/2008/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), [datetime.date(2008, 10, 1)])
self.assertEqual(res.context['year'], datetime.date(2008, 1, 1))
self.assertTemplateUsed(res, 'tests:templates/book_archive_year.html')
# Since allow_empty=False, next/prev years must be valid (#7164)
self.assertEqual(res.context['next_year'], None)
self.assertEqual(res.context['previous_year'], datetime.date(2006, 1, 1))
def test_year_view_make_object_list(self):
res = self.client.get('/dates/books/2006/make_object_list/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), [datetime.date(2006, 5, 1)])
self.assertEqual(list(res.context['book_list']), list(Book.objects.filter(pubdate__year=2006)))
self.assertEqual(list(res.context['object_list']), list(Book.objects.filter(pubdate__year=2006)))
self.assertTemplateUsed(res, 'tests:templates/book_archive_year.html')
def test_year_view_empty(self):
res = self.client.get('/dates/books/1999/')
self.assertEqual(res.status_code, 404)
res = self.client.get('/dates/books/1999/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), [])
self.assertEqual(list(res.context['book_list']), [])
# Since allow_empty=True, next/prev are allowed to be empty years (#7164)
self.assertEqual(res.context['next_year'], datetime.date(2000, 1, 1))
self.assertEqual(res.context['previous_year'], datetime.date(1998, 1, 1))
def test_year_view_allow_future(self):
# Create a new book in the future
year = datetime.date.today().year + 1
Book.objects.create(name="The New New Testement", pages=600, pubdate=datetime.date(year, 1, 1))
res = self.client.get('/dates/books/%s/' % year)
self.assertEqual(res.status_code, 404)
res = self.client.get('/dates/books/%s/allow_empty/' % year)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), [])
res = self.client.get('/dates/books/%s/allow_future/' % year)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), [datetime.date(year, 1, 1)])
def test_year_view_paginated(self):
res = self.client.get('/dates/books/2006/paginated/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), list(Book.objects.filter(pubdate__year=2006)))
self.assertEqual(list(res.context['object_list']), list(Book.objects.filter(pubdate__year=2006)))
self.assertTemplateUsed(res, 'tests:templates/book_archive_year.html')
def test_year_view_invalid_pattern(self):
res = self.client.get('/dates/books/no_year/')
self.assertEqual(res.status_code, 404)
def test_no_duplicate_query(self):
# Regression test for #18354
with self.assertNumQueries(4):
self.client.get('/dates/books/2008/reverse/')
def test_datetime_year_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get('/dates/booksignings/2008/')
self.assertEqual(res.status_code, 200)
@skipUnlessDBFeature('has_zoneinfo_database')
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_year_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/2008/')
self.assertEqual(res.status_code, 200)
def test_date_list_order(self):
"""date_list should be sorted ascending in year view"""
_make_books(10, base_date=datetime.date(2011, 12, 25))
res = self.client.get('/dates/books/2011/')
self.assertEqual(list(res.context['date_list']), list(sorted(res.context['date_list'])))
class MonthArchiveViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'generic_views.urls'
def test_month_view(self):
res = self.client.get('/dates/books/2008/oct/')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'tests:templates/book_archive_month.html')
self.assertEqual(list(res.context['date_list']), [datetime.date(2008, 10, 1)])
self.assertEqual(list(res.context['book_list']),
list(Book.objects.filter(pubdate=datetime.date(2008, 10, 1))))
self.assertEqual(res.context['month'], datetime.date(2008, 10, 1))
# Since allow_empty=False, next/prev months must be valid (#7164)
self.assertEqual(res.context['next_month'], None)
self.assertEqual(res.context['previous_month'], datetime.date(2006, 5, 1))
def test_month_view_allow_empty(self):
# allow_empty = False, empty month
res = self.client.get('/dates/books/2000/jan/')
self.assertEqual(res.status_code, 404)
# allow_empty = True, empty month
res = self.client.get('/dates/books/2000/jan/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['date_list']), [])
self.assertEqual(list(res.context['book_list']), [])
self.assertEqual(res.context['month'], datetime.date(2000, 1, 1))
# Since allow_empty=True, next/prev are allowed to be empty months (#7164)
self.assertEqual(res.context['next_month'], datetime.date(2000, 2, 1))
self.assertEqual(res.context['previous_month'], datetime.date(1999, 12, 1))
# allow_empty but not allow_future: next_month should be empty (#7164)
url = datetime.date.today().strftime('/dates/books/%Y/%b/allow_empty/').lower()
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_month'], None)
def test_month_view_allow_future(self):
future = (datetime.date.today() + datetime.timedelta(days=60)).replace(day=1)
urlbit = future.strftime('%Y/%b').lower()
b = Book.objects.create(name="The New New Testement", pages=600, pubdate=future)
# allow_future = False, future month
res = self.client.get('/dates/books/%s/' % urlbit)
self.assertEqual(res.status_code, 404)
# allow_future = True, valid future month
res = self.client.get('/dates/books/%s/allow_future/' % urlbit)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['date_list'][0], b.pubdate)
self.assertEqual(list(res.context['book_list']), [b])
self.assertEqual(res.context['month'], future)
# Since allow_future = True but not allow_empty, next/prev are not
# allowed to be empty months (#7164)
self.assertEqual(res.context['next_month'], None)
self.assertEqual(res.context['previous_month'], datetime.date(2008, 10, 1))
# allow_future, but not allow_empty, with a current month. So next
# should be in the future (yup, #7164, again)
res = self.client.get('/dates/books/2008/oct/allow_future/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_month'], future)
self.assertEqual(res.context['previous_month'], datetime.date(2006, 5, 1))
def test_month_view_paginated(self):
res = self.client.get('/dates/books/2008/oct/paginated/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), list(Book.objects.filter(pubdate__year=2008, pubdate__month=10)))
self.assertEqual(list(res.context['object_list']), list(Book.objects.filter(pubdate__year=2008, pubdate__month=10)))
self.assertTemplateUsed(res, 'tests:templates/book_archive_month.html')
def test_custom_month_format(self):
res = self.client.get('/dates/books/2008/10/')
self.assertEqual(res.status_code, 200)
def test_month_view_invalid_pattern(self):
res = self.client.get('/dates/books/2007/no_month/')
self.assertEqual(res.status_code, 404)
def test_previous_month_without_content(self):
"Content can exist on any day of the previous month. Refs #14711"
self.pubdate_list = [
datetime.date(2010, month, day)
for month, day in ((9, 1), (10, 2), (11, 3))
]
for pubdate in self.pubdate_list:
name = str(pubdate)
Book.objects.create(name=name, slug=name, pages=100, pubdate=pubdate)
res = self.client.get('/dates/books/2010/nov/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['previous_month'], datetime.date(2010, 10, 1))
# The following test demonstrates the bug
res = self.client.get('/dates/books/2010/nov/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['previous_month'], datetime.date(2010, 10, 1))
# The bug does not occur here because a Book with pubdate of Sep 1 exists
res = self.client.get('/dates/books/2010/oct/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['previous_month'], datetime.date(2010, 9, 1))
def test_datetime_month_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 2, 1, 12, 0))
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
BookSigning.objects.create(event_date=datetime.datetime(2008, 6, 3, 12, 0))
res = self.client.get('/dates/booksignings/2008/apr/')
self.assertEqual(res.status_code, 200)
@skipUnlessDBFeature('has_zoneinfo_database')
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_month_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 2, 1, 12, 0, tzinfo=timezone.utc))
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
BookSigning.objects.create(event_date=datetime.datetime(2008, 6, 3, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/2008/apr/')
self.assertEqual(res.status_code, 200)
def test_date_list_order(self):
"""date_list should be sorted ascending in month view"""
_make_books(10, base_date=datetime.date(2011, 12, 25))
res = self.client.get('/dates/books/2011/dec/')
self.assertEqual(list(res.context['date_list']), list(sorted(res.context['date_list'])))
class WeekArchiveViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'generic_views.urls'
def test_week_view(self):
res = self.client.get('/dates/books/2008/week/39/')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'tests:templates/book_archive_week.html')
self.assertEqual(res.context['book_list'][0], Book.objects.get(pubdate=datetime.date(2008, 10, 1)))
self.assertEqual(res.context['week'], datetime.date(2008, 9, 28))
# Since allow_empty=False, next/prev weeks must be valid
self.assertEqual(res.context['next_week'], None)
self.assertEqual(res.context['previous_week'], datetime.date(2006, 4, 30))
def test_week_view_allow_empty(self):
# allow_empty = False, empty week
res = self.client.get('/dates/books/2008/week/12/')
self.assertEqual(res.status_code, 404)
# allow_empty = True, empty month
res = self.client.get('/dates/books/2008/week/12/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), [])
self.assertEqual(res.context['week'], datetime.date(2008, 3, 23))
# Since allow_empty=True, next/prev are allowed to be empty weeks
self.assertEqual(res.context['next_week'], datetime.date(2008, 3, 30))
self.assertEqual(res.context['previous_week'], datetime.date(2008, 3, 16))
# allow_empty but not allow_future: next_week should be empty
url = datetime.date.today().strftime('/dates/books/%Y/week/%U/allow_empty/').lower()
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_week'], None)
def test_week_view_allow_future(self):
# January 7th always falls in week 1, given Python's definition of week numbers
future = datetime.date(datetime.date.today().year + 1, 1, 7)
future_sunday = future - datetime.timedelta(days=(future.weekday() + 1) % 7)
b = Book.objects.create(name="The New New Testement", pages=600, pubdate=future)
res = self.client.get('/dates/books/%s/week/1/' % future.year)
self.assertEqual(res.status_code, 404)
res = self.client.get('/dates/books/%s/week/1/allow_future/' % future.year)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), [b])
self.assertEqual(res.context['week'], future_sunday)
# Since allow_future = True but not allow_empty, next/prev are not
# allowed to be empty weeks
self.assertEqual(res.context['next_week'], None)
self.assertEqual(res.context['previous_week'], datetime.date(2008, 9, 28))
# allow_future, but not allow_empty, with a current week. So next
# should be in the future
res = self.client.get('/dates/books/2008/week/39/allow_future/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_week'], future_sunday)
self.assertEqual(res.context['previous_week'], datetime.date(2006, 4, 30))
def test_week_view_paginated(self):
week_start = datetime.date(2008, 9, 28)
week_end = week_start + datetime.timedelta(days=7)
res = self.client.get('/dates/books/2008/week/39/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), list(Book.objects.filter(pubdate__gte=week_start, pubdate__lt=week_end)))
self.assertEqual(list(res.context['object_list']), list(Book.objects.filter(pubdate__gte=week_start, pubdate__lt=week_end)))
self.assertTemplateUsed(res, 'tests:templates/book_archive_week.html')
def test_week_view_invalid_pattern(self):
res = self.client.get('/dates/books/2007/week/no_week/')
self.assertEqual(res.status_code, 404)
def test_week_start_Monday(self):
# Regression for #14752
res = self.client.get('/dates/books/2008/week/39/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['week'], datetime.date(2008, 9, 28))
res = self.client.get('/dates/books/2008/week/39/monday/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['week'], datetime.date(2008, 9, 29))
def test_datetime_week_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get('/dates/booksignings/2008/week/13/')
self.assertEqual(res.status_code, 200)
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_week_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/2008/week/13/')
self.assertEqual(res.status_code, 200)
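# A minimal standalone sketch (not part of the test suite) of the week
# arithmetic exercised above: '%U' numbers weeks with Sunday as the first
# day, and the Sunday starting a date's week is found by stepping back
# (weekday() + 1) % 7 days, since datetime.weekday() counts Monday as 0.
def _sunday_week_start(day):
    """Return the Sunday that opens the '%U' week containing ``day``."""
    return day - datetime.timedelta(days=(day.weekday() + 1) % 7)

# 2008-03-23 was a Sunday and opens week 12 of 2008, matching the
# WeekArchiveViewTests assertions above.
assert _sunday_week_start(datetime.date(2008, 3, 26)) == datetime.date(2008, 3, 23)
assert datetime.date(2008, 3, 23).strftime('%U') == '12'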
class DayArchiveViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'generic_views.urls'
def test_day_view(self):
res = self.client.get('/dates/books/2008/oct/01/')
self.assertEqual(res.status_code, 200)
self.assertTemplateUsed(res, 'tests:templates/book_archive_day.html')
self.assertEqual(list(res.context['book_list']),
list(Book.objects.filter(pubdate=datetime.date(2008, 10, 1))))
self.assertEqual(res.context['day'], datetime.date(2008, 10, 1))
# Since allow_empty=False, next/prev days must be valid.
self.assertEqual(res.context['next_day'], None)
self.assertEqual(res.context['previous_day'], datetime.date(2006, 5, 1))
def test_day_view_allow_empty(self):
        # allow_empty = False, empty day
res = self.client.get('/dates/books/2000/jan/1/')
self.assertEqual(res.status_code, 404)
        # allow_empty = True, empty day
res = self.client.get('/dates/books/2000/jan/1/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), [])
self.assertEqual(res.context['day'], datetime.date(2000, 1, 1))
        # Since allow_empty=True, next/prev are allowed to be empty days (#7164)
self.assertEqual(res.context['next_day'], datetime.date(2000, 1, 2))
self.assertEqual(res.context['previous_day'], datetime.date(1999, 12, 31))
        # allow_empty but not allow_future: next_day should be empty (#7164)
url = datetime.date.today().strftime('/dates/books/%Y/%b/%d/allow_empty/').lower()
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_day'], None)
def test_day_view_allow_future(self):
future = (datetime.date.today() + datetime.timedelta(days=60))
urlbit = future.strftime('%Y/%b/%d').lower()
b = Book.objects.create(name="The New New Testement", pages=600, pubdate=future)
        # allow_future = False, future day
res = self.client.get('/dates/books/%s/' % urlbit)
self.assertEqual(res.status_code, 404)
        # allow_future = True, valid future day
res = self.client.get('/dates/books/%s/allow_future/' % urlbit)
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), [b])
self.assertEqual(res.context['day'], future)
# allow_future but not allow_empty, next/prev must be valid
self.assertEqual(res.context['next_day'], None)
self.assertEqual(res.context['previous_day'], datetime.date(2008, 10, 1))
        # allow_future, but not allow_empty, with a current day.
res = self.client.get('/dates/books/2008/oct/01/allow_future/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['next_day'], future)
self.assertEqual(res.context['previous_day'], datetime.date(2006, 5, 1))
# allow_future for yesterday, next_day is today (#17192)
today = datetime.date.today()
yesterday = today - datetime.timedelta(days=1)
res = self.client.get('/dates/books/%s/allow_empty_and_future/'
% yesterday.strftime('%Y/%b/%d').lower())
self.assertEqual(res.context['next_day'], today)
def test_day_view_paginated(self):
res = self.client.get('/dates/books/2008/oct/1/')
self.assertEqual(res.status_code, 200)
self.assertEqual(list(res.context['book_list']), list(Book.objects.filter(pubdate__year=2008, pubdate__month=10, pubdate__day=1)))
self.assertEqual(list(res.context['object_list']), list(Book.objects.filter(pubdate__year=2008, pubdate__month=10, pubdate__day=1)))
self.assertTemplateUsed(res, 'tests:templates/book_archive_day.html')
def test_next_prev_context(self):
res = self.client.get('/dates/books/2008/oct/01/')
self.assertEqual(res.content, b"Archive for Oct. 1, 2008. Previous day is May 1, 2006")
def test_custom_month_format(self):
res = self.client.get('/dates/books/2008/10/01/')
self.assertEqual(res.status_code, 200)
def test_day_view_invalid_pattern(self):
res = self.client.get('/dates/books/2007/oct/no_day/')
self.assertEqual(res.status_code, 404)
def test_today_view(self):
res = self.client.get('/dates/books/today/')
self.assertEqual(res.status_code, 404)
res = self.client.get('/dates/books/today/allow_empty/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['day'], datetime.date.today())
def test_datetime_day_view(self):
BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get('/dates/booksignings/2008/apr/2/')
self.assertEqual(res.status_code, 200)
@requires_tz_support
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_day_view(self):
bs = BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/2008/apr/2/')
self.assertEqual(res.status_code, 200)
# 2008-04-02T00:00:00+03:00 (beginning of day) > 2008-04-01T22:00:00+00:00 (book signing event date)
bs.event_date = datetime.datetime(2008, 4, 1, 22, 0, tzinfo=timezone.utc)
bs.save()
res = self.client.get('/dates/booksignings/2008/apr/2/')
self.assertEqual(res.status_code, 200)
# 2008-04-03T00:00:00+03:00 (end of day) > 2008-04-02T22:00:00+00:00 (book signing event date)
bs.event_date = datetime.datetime(2008, 4, 2, 22, 0, tzinfo=timezone.utc)
bs.save()
res = self.client.get('/dates/booksignings/2008/apr/2/')
self.assertEqual(res.status_code, 404)
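# A standalone sketch (not used by the tests) of the day-window conversion the
# aware-datetime tests above rely on. Africa/Nairobi is UTC+3 with no DST, so
# the local day 2008-04-02 maps to the half-open UTC interval
# [2008-04-01 21:00Z, 2008-04-02 21:00Z): an event at 2008-04-01 22:00Z falls
# inside the day (HTTP 200) while one at 2008-04-02 22:00Z falls outside (404).
_EAT_OFFSET = datetime.timedelta(hours=3)  # fixed UTC+3 offset assumed here
_day_start = datetime.datetime(2008, 4, 2, tzinfo=timezone.utc) - _EAT_OFFSET
_day_end = datetime.datetime(2008, 4, 3, tzinfo=timezone.utc) - _EAT_OFFSET
assert _day_start <= datetime.datetime(2008, 4, 1, 22, 0, tzinfo=timezone.utc) < _day_end
assert not (_day_start <= datetime.datetime(2008, 4, 2, 22, 0, tzinfo=timezone.utc) < _day_end)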
class DateDetailViewTests(TestCase):
fixtures = ['generic-views-test-data.json']
urls = 'generic_views.urls'
def test_date_detail_by_pk(self):
res = self.client.get('/dates/books/2008/oct/01/1/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], Book.objects.get(pk=1))
self.assertEqual(res.context['book'], Book.objects.get(pk=1))
self.assertTemplateUsed(res, 'tests:templates/book_detail.html')
def test_date_detail_by_slug(self):
res = self.client.get('/dates/books/2006/may/01/byslug/dreaming-in-code/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['book'], Book.objects.get(slug='dreaming-in-code'))
def test_date_detail_custom_month_format(self):
res = self.client.get('/dates/books/2008/10/01/1/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['book'], Book.objects.get(pk=1))
def test_date_detail_allow_future(self):
future = (datetime.date.today() + datetime.timedelta(days=60))
urlbit = future.strftime('%Y/%b/%d').lower()
b = Book.objects.create(name="The New New Testement", slug="new-new", pages=600, pubdate=future)
res = self.client.get('/dates/books/%s/new-new/' % urlbit)
self.assertEqual(res.status_code, 404)
res = self.client.get('/dates/books/%s/%s/allow_future/' % (urlbit, b.id))
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['book'], b)
self.assertTemplateUsed(res, 'tests:templates/book_detail.html')
def test_invalid_url(self):
self.assertRaises(AttributeError, self.client.get, "/dates/books/2008/oct/01/nopk/")
def test_get_object_custom_query(self):
"""
        Ensure that custom queries are used when provided to
        BaseDateDetailView.get_object().
        Refs #16918.
"""
res = self.client.get(
'/dates/books/get_object_custom_query/2006/may/01/2/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.context['object'], Book.objects.get(pk=2))
self.assertEqual(res.context['book'], Book.objects.get(pk=2))
self.assertTemplateUsed(res, 'tests:templates/book_detail.html')
res = self.client.get(
'/dates/books/get_object_custom_query/2008/oct/01/1/')
self.assertEqual(res.status_code, 404)
def test_datetime_date_detail(self):
bs = BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0))
res = self.client.get('/dates/booksignings/2008/apr/2/%d/' % bs.pk)
self.assertEqual(res.status_code, 200)
@requires_tz_support
@override_settings(USE_TZ=True, TIME_ZONE='Africa/Nairobi')
def test_aware_datetime_date_detail(self):
bs = BookSigning.objects.create(event_date=datetime.datetime(2008, 4, 2, 12, 0, tzinfo=timezone.utc))
res = self.client.get('/dates/booksignings/2008/apr/2/%d/' % bs.pk)
self.assertEqual(res.status_code, 200)
# 2008-04-02T00:00:00+03:00 (beginning of day) > 2008-04-01T22:00:00+00:00 (book signing event date)
bs.event_date = datetime.datetime(2008, 4, 1, 22, 0, tzinfo=timezone.utc)
bs.save()
res = self.client.get('/dates/booksignings/2008/apr/2/%d/' % bs.pk)
self.assertEqual(res.status_code, 200)
# 2008-04-03T00:00:00+03:00 (end of day) > 2008-04-02T22:00:00+00:00 (book signing event date)
bs.event_date = datetime.datetime(2008, 4, 2, 22, 0, tzinfo=timezone.utc)
bs.save()
res = self.client.get('/dates/booksignings/2008/apr/2/%d/' % bs.pk)
self.assertEqual(res.status_code, 404)
|
mit
| 4,528,030,635,448,044,000
| 48.608
| 140
| 0.652959
| false
|
qedsoftware/commcare-hq
|
corehq/apps/userreports/reports/view.py
|
1
|
19536
|
import json
import os
import tempfile
from StringIO import StringIO
from corehq.apps.domain.views import BaseDomainView
from corehq.apps.reports.util import \
DEFAULT_CSS_FORM_ACTIONS_CLASS_REPORT_FILTER
from corehq.apps.style.decorators import (
use_select2,
use_daterangepicker,
use_jquery_ui,
use_nvd3,
use_datatables,
)
from corehq.apps.userreports.const import REPORT_BUILDER_EVENTS_KEY, \
DATA_SOURCE_NOT_FOUND_ERROR_MESSAGE
from couchexport.shortcuts import export_response
from corehq.toggles import DISABLE_COLUMN_LIMIT_IN_UCR
from dimagi.utils.modules import to_function
from django.conf import settings
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.http import HttpResponse, Http404, HttpResponseBadRequest
from django.utils.translation import ugettext as _, ugettext_noop
from braces.views import JSONResponseMixin
from corehq.apps.locations.permissions import conditionally_location_safe
from corehq.apps.reports.dispatcher import (
ReportDispatcher,
)
from corehq.apps.reports.models import ReportConfig
from corehq.apps.reports_core.exceptions import FilterException
from corehq.apps.userreports.exceptions import (
BadSpecError,
UserReportsError,
TableNotFoundWarning,
UserReportsFilterError,
DataSourceConfigurationNotFoundError)
from corehq.apps.userreports.models import (
CUSTOM_REPORT_PREFIX,
StaticReportConfiguration,
ReportConfiguration,
report_config_id_is_static,
)
from corehq.apps.userreports.reports.factory import ReportFactory
from corehq.apps.userreports.reports.util import (
get_expanded_columns,
has_location_filter,
)
from corehq.apps.userreports.util import (
default_language,
has_report_builder_trial,
can_edit_report,
)
from corehq.util.couch import get_document_or_404, get_document_or_not_found, \
DocumentNotFound
from couchexport.export import export_from_tables
from couchexport.models import Format
from dimagi.utils.couch.pagination import DatatablesParams
from dimagi.utils.decorators.memoized import memoized
from dimagi.utils.web import json_request
from no_exceptions.exceptions import Http403
from corehq.apps.reports.datatables import DataTablesHeader
UCR_EXPORT_TO_EXCEL_ROW_LIMIT = 1000
def get_filter_values(filters, request_dict, user=None):
"""
Return a dictionary mapping filter ids to specified values
:param filters: A list of corehq.apps.reports_core.filters.BaseFilter
objects (or subclasses)
    :param request_dict: keyword arguments from the request
    :return: a dict mapping each filter's css_id to its parsed value
"""
try:
return {
filter.css_id: filter.get_value(request_dict, user)
for filter in filters
}
    except FilterException as e:
raise UserReportsFilterError(unicode(e))
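# Illustrative sketch only: get_filter_values() fans request_dict out through
# each filter's get_value(). The stub below is hypothetical (real filters live
# in corehq.apps.reports_core.filters) but shows the contract a filter must
# satisfy: a ``css_id`` attribute and a ``get_value(request_dict, user)``.
class _ExampleFilter(object):
    css_id = 'example_date'

    def get_value(self, request_dict, user):
        return request_dict.get(self.css_id)

# get_filter_values([_ExampleFilter()], {'example_date': '2017-01-01'})
# => {'example_date': '2017-01-01'}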
def query_dict_to_dict(query_dict, domain):
"""
Transform the given QueryDict to a normal dict where each value has been
converted from a string to a dict (if the value is JSON).
Also add the domain to the dict.
:param query_dict: a QueryDict
:param domain:
:return: a dict
"""
request_dict = json_request(query_dict)
request_dict['domain'] = domain
return request_dict
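# Rough standalone equivalent of the helper above, for illustration only: the
# real JSON coercion is delegated to dimagi.utils.web.json_request and may
# differ in detail from this sketch.
def _query_dict_to_dict_sketch(pairs, domain):
    out = {}
    for key, value in pairs.items():
        try:
            out[key] = json.loads(value)  # '2' -> 2, '"abc"' -> 'abc'
        except ValueError:
            out[key] = value  # leave non-JSON values as plain strings
    out['domain'] = domain
    return out

# _query_dict_to_dict_sketch({'page': '2', 'q': 'abc'}, 'demo')
# => {'page': 2, 'q': 'abc', 'domain': 'demo'}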
class ConfigurableReport(JSONResponseMixin, BaseDomainView):
section_name = ugettext_noop("Reports")
template_name = 'userreports/configurable_report.html'
slug = "configurable"
prefix = slug
emailable = True
is_exportable = True
show_filters = True
_domain = None
@property
def domain(self):
if self._domain is not None:
return self._domain
return super(ConfigurableReport, self).domain
@use_select2
@use_daterangepicker
@use_jquery_ui
@use_datatables
@use_nvd3
@conditionally_location_safe(has_location_filter)
def dispatch(self, request, *args, **kwargs):
original = super(ConfigurableReport, self).dispatch(request, *args, **kwargs)
return original
@property
def section_url(self):
# todo what should the parent section url be?
return "#"
@property
def is_static(self):
return report_config_id_is_static(self.report_config_id)
@property
def is_custom_rendered(self):
return self.report_config_id.startswith(CUSTOM_REPORT_PREFIX)
@property
@memoized
def spec(self):
if self.is_static:
return StaticReportConfiguration.by_id(self.report_config_id)
else:
return get_document_or_not_found(ReportConfiguration, self.domain, self.report_config_id)
def get_spec_or_404(self):
try:
return self.spec
except DocumentNotFound:
raise Http404()
def has_viable_configuration(self):
try:
self.spec
except DocumentNotFound:
return False
else:
return True
@property
def title(self):
return self.spec.title
@property
def page_name(self):
return self.spec.title
@property
@memoized
def data_source(self):
report = ReportFactory.from_spec(self.spec, include_prefilters=True)
report.lang = self.lang
return report
@property
@memoized
def request_dict(self):
if self.request.method == 'GET':
return query_dict_to_dict(self.request.GET, self.domain)
elif self.request.method == 'POST':
return query_dict_to_dict(self.request.POST, self.domain)
@property
@memoized
def filter_values(self):
try:
user = self.request.couch_user
except AttributeError:
user = None
return get_filter_values(self.filters, self.request_dict, user=user)
@property
@memoized
def filter_context(self):
return {
filter.css_id: filter.context(self.filter_values[filter.css_id], self.lang)
for filter in self.filters
}
@property
@memoized
def filters(self):
return self.spec.ui_filters
_report_config_id = None
@property
def report_config_id(self):
if self._report_config_id is not None:
return self._report_config_id
return self.kwargs['subreport_slug']
_lang = None
@property
def lang(self):
if self._lang is not None:
return self._lang
return self.request.couch_user.language or default_language()
def get(self, request, *args, **kwargs):
if self.has_permissions(self.domain, request.couch_user):
self.get_spec_or_404()
if kwargs.get('render_as') == 'email':
return self.email_response
elif kwargs.get('render_as') == 'excel':
return self.excel_response
elif request.GET.get('format', None) == "export":
return self.export_response
elif request.GET.get('format', None) == 'export_size_check':
return self.export_size_check_response
elif request.is_ajax() or request.GET.get('format', None) == 'json':
return self.get_ajax(self.request.GET)
self.content_type = None
try:
self.add_warnings(self.request)
except UserReportsError as e:
details = ''
if isinstance(e, DataSourceConfigurationNotFoundError):
error_message = DATA_SOURCE_NOT_FOUND_ERROR_MESSAGE
else:
error_message = _(
'It looks like there is a problem with your report. '
'You may need to delete and recreate the report. '
'If you believe you are seeing this message in error, please report an issue.'
)
details = unicode(e)
self.template_name = 'userreports/report_error.html'
context = {
'report_id': self.report_config_id,
'is_static': self.is_static,
'error_message': error_message,
'details': details,
}
context.update(self.main_context)
return self.render_to_response(context)
return super(ConfigurableReport, self).get(request, *args, **kwargs)
else:
raise Http403()
def post(self, request, *args, **kwargs):
if self.has_permissions(self.domain, request.couch_user):
self.get_spec_or_404()
if request.is_ajax():
return self.get_ajax(self.request.POST)
else:
return HttpResponseBadRequest()
else:
raise Http403()
def has_permissions(self, domain, user):
return True
def add_warnings(self, request):
for warning in self.data_source.column_warnings:
messages.warning(request, warning)
@property
def page_context(self):
context = {
'report': self,
'report_table': {'default_rows': 25},
'filter_context': self.filter_context,
'url': self.url,
'method': 'POST',
'headers': self.headers,
'can_edit_report': can_edit_report(self.request, self),
'has_report_builder_trial': has_report_builder_trial(self.request),
'report_filter_form_action_css_class': DEFAULT_CSS_FORM_ACTIONS_CLASS_REPORT_FILTER,
}
context.update(self.saved_report_context_data)
context.update(self.pop_report_builder_context_data())
if isinstance(self.spec, ReportConfiguration) and self.spec.report_meta.builder_report_type == 'map':
context['report_table']['default_rows'] = 100
return context
def pop_report_builder_context_data(self):
"""
Pop any report builder data stored on the session and return a dict to
be included in the template context.
"""
return {
'report_builder_events': self.request.session.pop(REPORT_BUILDER_EVENTS_KEY, [])
}
@property
def saved_report_context_data(self):
def _get_context_for_saved_report(report_config):
if report_config:
report_config_data = report_config.to_json()
report_config_data['filters'].update(report_config.get_date_range())
return report_config_data
else:
return ReportConfig.default()
saved_report_config_id = self.request.GET.get('config_id')
saved_report_config = get_document_or_404(ReportConfig, self.domain, saved_report_config_id) \
if saved_report_config_id else None
return {
'report_configs': [
_get_context_for_saved_report(saved_report)
for saved_report in ReportConfig.by_domain_and_owner(
self.domain, self.request.couch_user._id, report_slug=self.slug
)
],
'default_config': _get_context_for_saved_report(saved_report_config),
'datespan_filters': ReportConfig.datespan_filter_choices(self.datespan_filters, self.lang),
}
@property
def has_datespan(self):
return bool(self.datespan_filters)
@property
def datespan_filters(self):
return [
f for f in self.spec.filters
if f['type'] == 'date'
]
@property
def headers(self):
return DataTablesHeader(*[col.data_tables_column for col in self.data_source.inner_columns])
def get_ajax(self, params):
try:
data_source = self.data_source
if len(data_source.inner_columns) > 50 and not DISABLE_COLUMN_LIMIT_IN_UCR.enabled(self.domain):
raise UserReportsError(_("This report has too many columns to be displayed"))
data_source.set_filter_values(self.filter_values)
sort_column = params.get('iSortCol_0')
sort_order = params.get('sSortDir_0', 'ASC')
echo = int(params.get('sEcho', 1))
if sort_column and echo != 1:
data_source.set_order_by(
[(data_source.top_level_columns[int(sort_column)].column_id, sort_order.upper())]
)
datatables_params = DatatablesParams.from_request_dict(params)
page = list(data_source.get_data(start=datatables_params.start, limit=datatables_params.count))
total_records = data_source.get_total_records()
total_row = data_source.get_total_row() if data_source.has_total_row else None
except UserReportsError as e:
if settings.DEBUG:
raise
return self.render_json_response({
'error': e.message,
'aaData': [],
'iTotalRecords': 0,
'iTotalDisplayRecords': 0,
})
except TableNotFoundWarning:
if self.spec.report_meta.created_by_builder:
msg = _(
"The database table backing your report does not exist yet. "
"Please wait while the report is populated."
)
else:
msg = _(
"The database table backing your report does not exist yet. "
"You must rebuild the data source before viewing the report."
)
return self.render_json_response({
'warning': msg
})
json_response = {
'aaData': page,
"sEcho": params.get('sEcho', 0),
"iTotalRecords": total_records,
"iTotalDisplayRecords": total_records,
}
if total_row is not None:
json_response["total_row"] = total_row
return self.render_json_response(json_response)
def _get_initial(self, request, **kwargs):
pass
@classmethod
def url_pattern(cls):
from django.conf.urls import url
pattern = r'^{slug}/(?P<subreport_slug>[\w\-:]+)/$'.format(slug=cls.slug)
return url(pattern, cls.as_view(), name=cls.slug)
@property
def type(self):
"""
Used to populate ReportConfig.report_type
"""
return self.prefix
@property
def sub_slug(self):
"""
Used to populate ReportConfig.subreport_slug
"""
return self.report_config_id
@classmethod
def get_report(cls, domain, slug, report_config_id):
report = cls()
report._domain = domain
report._report_config_id = report_config_id
if not report.has_viable_configuration():
return None
report.name = report.title
return report
@property
def url(self):
return reverse(self.slug, args=[self.domain, self.report_config_id])
@property
@memoized
def export_table(self):
try:
data = self.data_source
data.set_filter_values(self.filter_values)
data.set_order_by([(o['field'], o['order']) for o in self.spec.sort_expression])
except UserReportsError as e:
return self.render_json_response({
'error': e.message,
})
raw_rows = list(data.get_data())
headers = [column.header for column in self.data_source.columns]
column_id_to_expanded_column_ids = get_expanded_columns(data.top_level_columns, data.config)
column_ids = []
for column in self.spec.report_columns:
column_ids.extend(column_id_to_expanded_column_ids.get(column.column_id, [column.column_id]))
rows = [[raw_row[column_id] for column_id in column_ids] for raw_row in raw_rows]
total_rows = [data.get_total_row()] if data.has_total_row else []
return [
[
self.title,
[headers] + rows + total_rows
]
]
@property
@memoized
def email_response(self):
fd, path = tempfile.mkstemp()
with os.fdopen(fd, 'wb') as temp:
export_from_tables(self.export_table, temp, Format.HTML)
with open(path) as f:
return HttpResponse(json.dumps({
'report': f.read(),
}))
@property
@memoized
def excel_response(self):
        output = StringIO()
        export_from_tables(self.export_table, output, Format.XLS_2007)
        return output
@property
@memoized
def export_too_large(self):
data = self.data_source
data.set_filter_values(self.filter_values)
total_rows = data.get_total_records()
return total_rows > UCR_EXPORT_TO_EXCEL_ROW_LIMIT
@property
@memoized
def export_size_check_response(self):
try:
too_large = self.export_too_large
except UserReportsError as e:
if settings.DEBUG:
raise
return self.render_json_response({
'export_allowed': False,
'message': e.message,
})
if too_large:
return self.render_json_response({
'export_allowed': False,
'message': _(
"Report export is limited to {number} rows. "
"Please filter the data in your report to "
"{number} or fewer rows before exporting"
).format(number=UCR_EXPORT_TO_EXCEL_ROW_LIMIT),
})
return self.render_json_response({
"export_allowed": True,
})
@property
@memoized
def export_response(self):
if self.export_too_large:
            # The frontend should check the size with export_size_check_response()
            # before hitting this endpoint, but we check again here in case
            # the user modifies the URL manually.
return HttpResponseBadRequest()
temp = StringIO()
export_from_tables(self.export_table, temp, Format.XLS_2007)
return export_response(temp, Format.XLS_2007, self.title)
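# For reference: export_table above returns the nested structure consumed by
# couchexport's export_from_tables(), i.e. a list of [sheet_title, rows] pairs
# whose first row is the header. A hand-built equivalent (illustrative names):
_EXAMPLE_EXPORT_TABLE = [
    [
        'My Report',                 # sheet title
        [
            ['location', 'count'],   # header row
            ['Boston', 10],          # data row(s)
            ['Total', 10],           # total row, present only when configured
        ],
    ],
]
# export_from_tables(_EXAMPLE_EXPORT_TABLE, some_file_obj, Format.XLS_2007)
# would render it as a one-sheet workbook.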
# Base class for classes that provide custom rendering for UCRs
class CustomConfigurableReport(ConfigurableReport):
# Ensures that links in saved reports will hit CustomConfigurableReportDispatcher
slug = 'custom_configurable'
class CustomConfigurableReportDispatcher(ReportDispatcher):
slug = prefix = 'custom_configurable'
map_name = 'CUSTOM_UCR'
@staticmethod
def _report_class(domain, config_id):
class_path = StaticReportConfiguration.report_class_by_domain_and_id(
domain, config_id
)
return to_function(class_path)
def dispatch(self, request, domain, subreport_slug, **kwargs):
report_config_id = subreport_slug
try:
report_class = self._report_class(domain, report_config_id)
except BadSpecError:
raise Http404
return report_class.as_view()(request, domain=domain, subreport_slug=report_config_id, **kwargs)
def get_report(self, domain, slug, config_id):
try:
report_class = self._report_class(domain, config_id)
except BadSpecError:
return None
return report_class.get_report(domain, slug, config_id)
@classmethod
def url_pattern(cls):
from django.conf.urls import url
pattern = r'^{slug}/(?P<subreport_slug>[\w\-:]+)/$'.format(slug=cls.slug)
return url(pattern, cls.as_view(), name=cls.slug)
|
bsd-3-clause
| 7,399,060,679,144,329,000
| 33.21366
| 109
| 0.609285
| false
|
tseaver/gcloud-python
|
pubsub/google/cloud/pubsub_v1/gapic/publisher_client.py
|
1
|
40338
|
# -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accesses the google.pubsub.v1 Publisher API."""
import functools
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.path_template
import google.api_core.grpc_helpers
import google.api_core.page_iterator
import grpc
from google.cloud.pubsub_v1.gapic import publisher_client_config
from google.cloud.pubsub_v1.gapic.transports import publisher_grpc_transport
from google.cloud.pubsub_v1.proto import pubsub_pb2
from google.cloud.pubsub_v1.proto import pubsub_pb2_grpc
from google.iam.v1 import iam_policy_pb2
from google.iam.v1 import policy_pb2
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
'google-cloud-pubsub', ).version
class PublisherClient(object):
"""
The service that an application uses to manipulate topics, and to send
messages to a topic.
"""
SERVICE_ADDRESS = 'pubsub.googleapis.com:443'
"""The default address of the service."""
# The scopes needed to make gRPC calls to all of the methods defined in
# this service
_DEFAULT_SCOPES = (
'https://www.googleapis.com/auth/cloud-platform',
'https://www.googleapis.com/auth/pubsub', )
# The name of the interface for this client. This is the key used to
# find the method configuration in the client_config dictionary.
_INTERFACE_NAME = 'google.pubsub.v1.Publisher'
@classmethod
def from_service_account_file(cls, filename, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
PublisherClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename)
kwargs['credentials'] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@classmethod
def topic_path(cls, project, topic):
"""Return a fully-qualified topic string."""
return google.api_core.path_template.expand(
'projects/{project}/topics/{topic}',
project=project,
topic=topic,
)
@classmethod
def project_path(cls, project):
"""Return a fully-qualified project string."""
return google.api_core.path_template.expand(
'projects/{project}',
project=project,
)
def __init__(self,
transport=None,
channel=None,
credentials=None,
client_config=publisher_client_config.config,
client_info=None):
"""Constructor.
Args:
transport (Union[~.PublisherGrpcTransport,
Callable[[~.Credentials, type], ~.PublisherGrpcTransport]): A transport
instance, responsible for actually making the API calls.
The default transport uses the gRPC protocol.
This argument may also be a callable which returns a
transport instance. Callables will be sent the credentials
as the first argument and the default transport class as
the second argument.
channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
through which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is mutually exclusive with providing a
transport instance to ``transport``; doing so will raise
an exception.
client_config (dict): DEPRECATED. A dictionary of call options for
each method. If not specified, the default configuration is used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
"""
# Raise deprecation warnings for things we want to go away.
if client_config:
warnings.warn('The `client_config` argument is deprecated.',
PendingDeprecationWarning)
if channel:
warnings.warn(
'The `channel` argument is deprecated; use '
'`transport` instead.', PendingDeprecationWarning)
# Instantiate the transport.
# The transport is responsible for handling serialization and
# deserialization and actually sending data to the service.
if transport:
if callable(transport):
self.transport = transport(
credentials=credentials,
default_class=publisher_grpc_transport.
PublisherGrpcTransport,
)
else:
if credentials:
raise ValueError(
'Received both a transport instance and '
'credentials; these are mutually exclusive.')
self.transport = transport
else:
self.transport = publisher_grpc_transport.PublisherGrpcTransport(
address=self.SERVICE_ADDRESS,
channel=channel,
credentials=credentials,
)
if client_info is None:
client_info = (
google.api_core.gapic_v1.client_info.DEFAULT_CLIENT_INFO)
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
self._client_info = client_info
# Parse out the default settings for retry and timeout for each RPC
# from the client configuration.
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
client_config['interfaces'][self._INTERFACE_NAME], )
# Save a dictionary of cached API call functions.
# These are the actual callables which invoke the proper
# transport methods, wrapped with `wrap_method` to add retry,
# timeout, and the like.
self._inner_api_calls = {}
# Service calls
def create_topic(self,
name,
labels=None,
message_storage_policy=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Creates the given topic with the given name. See the
<a href=\"/pubsub/docs/admin#resource_names\"> resource name rules</a>.
Example:
>>> from google.cloud import pubsub_v1
>>>
>>> client = pubsub_v1.PublisherClient()
>>>
>>> name = client.topic_path('[PROJECT]', '[TOPIC]')
>>>
>>> response = client.create_topic(name)
Args:
name (str): The name of the topic. It must have the format
``\"projects/{project}/topics/{topic}\"``. ``{topic}`` must start with a letter,
and contain only letters (``[A-Za-z]``), numbers (``[0-9]``), dashes (``-``),
underscores (``_``), periods (``.``), tildes (``~``), plus (``+``) or percent
signs (``%``). It must be between 3 and 255 characters in length, and it
must not start with ``\"goog\"``.
labels (dict[str -> str]): User labels.
message_storage_policy (Union[dict, ~google.cloud.pubsub_v1.types.MessageStoragePolicy]): Policy constraining how messages published to the topic may be stored. It
is determined when the topic is created based on the policy configured at
the project level. It must not be set by the caller in the request to
CreateTopic or to UpdateTopic. This field will be populated in the
responses for GetTopic, CreateTopic, and UpdateTopic: if not present in the
response, then no constraints are in effect.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.pubsub_v1.types.MessageStoragePolicy`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.pubsub_v1.types.Topic` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'create_topic' not in self._inner_api_calls:
self._inner_api_calls[
'create_topic'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_topic,
default_retry=self._method_configs['CreateTopic'].retry,
default_timeout=self._method_configs['CreateTopic'].
timeout,
client_info=self._client_info,
)
request = pubsub_pb2.Topic(
name=name,
labels=labels,
message_storage_policy=message_storage_policy,
)
return self._inner_api_calls['create_topic'](
request, retry=retry, timeout=timeout, metadata=metadata)
def update_topic(self,
topic,
update_mask,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Updates an existing topic. Note that certain properties of a
topic are not modifiable.
Example:
>>> from google.cloud import pubsub_v1
>>>
>>> client = pubsub_v1.PublisherClient()
>>>
>>> # TODO: Initialize ``topic``:
>>> topic = {}
>>>
>>> # TODO: Initialize ``update_mask``:
>>> update_mask = {}
>>>
>>> response = client.update_topic(topic, update_mask)
Args:
topic (Union[dict, ~google.cloud.pubsub_v1.types.Topic]): The updated topic object.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.pubsub_v1.types.Topic`
update_mask (Union[dict, ~google.cloud.pubsub_v1.types.FieldMask]): Indicates which fields in the provided topic to update. Must be specified
and non-empty. Note that if ``update_mask`` contains
\"message_storage_policy\" then the new value will be determined based on the
policy configured at the project or organization level. The
``message_storage_policy`` must not be set in the ``topic`` provided above.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.pubsub_v1.types.FieldMask`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.pubsub_v1.types.Topic` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'update_topic' not in self._inner_api_calls:
self._inner_api_calls[
'update_topic'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.update_topic,
default_retry=self._method_configs['UpdateTopic'].retry,
default_timeout=self._method_configs['UpdateTopic'].
timeout,
client_info=self._client_info,
)
request = pubsub_pb2.UpdateTopicRequest(
topic=topic,
update_mask=update_mask,
)
return self._inner_api_calls['update_topic'](
request, retry=retry, timeout=timeout, metadata=metadata)
def publish(self,
topic,
messages,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Adds one or more messages to the topic. Returns ``NOT_FOUND`` if the topic
does not exist. The message payload must not be empty; it must contain
either a non-empty data field, or at least one attribute.
Example:
>>> from google.cloud import pubsub_v1
>>>
>>> client = pubsub_v1.PublisherClient()
>>>
>>> topic = client.topic_path('[PROJECT]', '[TOPIC]')
>>> data = b''
>>> messages_element = {'data': data}
>>> messages = [messages_element]
>>>
>>> response = client.publish(topic, messages)
Args:
topic (str): The messages in the request will be published on this topic.
Format is ``projects/{project}/topics/{topic}``.
messages (list[Union[dict, ~google.cloud.pubsub_v1.types.PubsubMessage]]): The messages to publish.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.pubsub_v1.types.PubsubMessage`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.pubsub_v1.types.PublishResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'publish' not in self._inner_api_calls:
self._inner_api_calls[
'publish'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.publish,
default_retry=self._method_configs['Publish'].retry,
default_timeout=self._method_configs['Publish'].timeout,
client_info=self._client_info,
)
request = pubsub_pb2.PublishRequest(
topic=topic,
messages=messages,
)
return self._inner_api_calls['publish'](
request, retry=retry, timeout=timeout, metadata=metadata)
def get_topic(self,
topic,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Gets the configuration of a topic.
Example:
>>> from google.cloud import pubsub_v1
>>>
>>> client = pubsub_v1.PublisherClient()
>>>
>>> topic = client.topic_path('[PROJECT]', '[TOPIC]')
>>>
>>> response = client.get_topic(topic)
Args:
topic (str): The name of the topic to get.
Format is ``projects/{project}/topics/{topic}``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.pubsub_v1.types.Topic` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'get_topic' not in self._inner_api_calls:
self._inner_api_calls[
'get_topic'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_topic,
default_retry=self._method_configs['GetTopic'].retry,
default_timeout=self._method_configs['GetTopic'].timeout,
client_info=self._client_info,
)
request = pubsub_pb2.GetTopicRequest(topic=topic, )
return self._inner_api_calls['get_topic'](
request, retry=retry, timeout=timeout, metadata=metadata)
def list_topics(self,
project,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Lists matching topics.
Example:
>>> from google.cloud import pubsub_v1
>>>
>>> client = pubsub_v1.PublisherClient()
>>>
>>> project = client.project_path('[PROJECT]')
>>>
>>> # Iterate over all results
>>> for element in client.list_topics(project):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_topics(project, options=CallOptions(page_token=INITIAL_PAGE)):
... for element in page:
... # process element
... pass
Args:
project (str): The name of the cloud project that topics belong to.
Format is ``projects/{project}``.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.gax.PageIterator` instance. By default, this
is an iterable of :class:`~google.cloud.pubsub_v1.types.Topic` instances.
This object can also be configured to iterate over the pages
of the response through the `options` parameter.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'list_topics' not in self._inner_api_calls:
self._inner_api_calls[
'list_topics'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_topics,
default_retry=self._method_configs['ListTopics'].retry,
default_timeout=self._method_configs['ListTopics'].timeout,
client_info=self._client_info,
)
request = pubsub_pb2.ListTopicsRequest(
project=project,
page_size=page_size,
)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls['list_topics'],
retry=retry,
timeout=timeout,
metadata=metadata),
request=request,
items_field='topics',
request_token_field='page_token',
response_token_field='next_page_token',
)
return iterator
def list_topic_subscriptions(
self,
topic,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Lists the names of the subscriptions on this topic.
Example:
>>> from google.cloud import pubsub_v1
>>>
>>> client = pubsub_v1.PublisherClient()
>>>
>>> topic = client.topic_path('[PROJECT]', '[TOPIC]')
>>>
>>> # Iterate over all results
>>> for element in client.list_topic_subscriptions(topic):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_topic_subscriptions(topic, options=CallOptions(page_token=INITIAL_PAGE)):
... for element in page:
... # process element
... pass
Args:
topic (str): The name of the topic that subscriptions are attached to.
Format is ``projects/{project}/topics/{topic}``.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.gax.PageIterator` instance. By default, this
is an iterable of :class:`str` instances.
This object can also be configured to iterate over the pages
of the response through the `options` parameter.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'list_topic_subscriptions' not in self._inner_api_calls:
self._inner_api_calls[
'list_topic_subscriptions'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_topic_subscriptions,
default_retry=self.
_method_configs['ListTopicSubscriptions'].retry,
default_timeout=self.
_method_configs['ListTopicSubscriptions'].timeout,
client_info=self._client_info,
)
request = pubsub_pb2.ListTopicSubscriptionsRequest(
topic=topic,
page_size=page_size,
)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls['list_topic_subscriptions'],
retry=retry,
timeout=timeout,
metadata=metadata),
request=request,
items_field='subscriptions',
request_token_field='page_token',
response_token_field='next_page_token',
)
return iterator
def delete_topic(self,
topic,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Deletes the topic with the given name. Returns ``NOT_FOUND`` if the topic
does not exist. After a topic is deleted, a new topic may be created with
the same name; this is an entirely new topic with none of the old
configuration or subscriptions. Existing subscriptions to this topic are
not deleted, but their ``topic`` field is set to ``_deleted-topic_``.
Example:
>>> from google.cloud import pubsub_v1
>>>
>>> client = pubsub_v1.PublisherClient()
>>>
>>> topic = client.topic_path('[PROJECT]', '[TOPIC]')
>>>
>>> client.delete_topic(topic)
Args:
topic (str): Name of the topic to delete.
Format is ``projects/{project}/topics/{topic}``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'delete_topic' not in self._inner_api_calls:
self._inner_api_calls[
'delete_topic'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.delete_topic,
default_retry=self._method_configs['DeleteTopic'].retry,
default_timeout=self._method_configs['DeleteTopic'].
timeout,
client_info=self._client_info,
)
request = pubsub_pb2.DeleteTopicRequest(topic=topic, )
self._inner_api_calls['delete_topic'](
request, retry=retry, timeout=timeout, metadata=metadata)
def set_iam_policy(self,
resource,
policy,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Sets the access control policy on the specified resource. Replaces any
existing policy.
Example:
>>> from google.cloud import pubsub_v1
>>>
>>> client = pubsub_v1.PublisherClient()
>>>
>>> resource = client.topic_path('[PROJECT]', '[TOPIC]')
>>>
>>> # TODO: Initialize ``policy``:
>>> policy = {}
>>>
>>> response = client.set_iam_policy(resource, policy)
Args:
resource (str): REQUIRED: The resource for which the policy is being specified.
``resource`` is usually specified as a path. For example, a Project
resource is specified as ``projects/{project}``.
policy (Union[dict, ~google.cloud.pubsub_v1.types.Policy]): REQUIRED: The complete policy to be applied to the ``resource``. The size of
the policy is limited to a few 10s of KB. An empty policy is a
valid policy but certain Cloud Platform services (such as Projects)
might reject them.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.pubsub_v1.types.Policy`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.pubsub_v1.types.Policy` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'set_iam_policy' not in self._inner_api_calls:
self._inner_api_calls[
'set_iam_policy'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.set_iam_policy,
default_retry=self._method_configs['SetIamPolicy'].retry,
default_timeout=self._method_configs['SetIamPolicy'].
timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.SetIamPolicyRequest(
resource=resource,
policy=policy,
)
return self._inner_api_calls['set_iam_policy'](
request, retry=retry, timeout=timeout, metadata=metadata)
def get_iam_policy(self,
resource,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Gets the access control policy for a resource.
Returns an empty policy if the resource exists and does not have a policy
set.
Example:
>>> from google.cloud import pubsub_v1
>>>
>>> client = pubsub_v1.PublisherClient()
>>>
>>> resource = client.topic_path('[PROJECT]', '[TOPIC]')
>>>
>>> response = client.get_iam_policy(resource)
Args:
resource (str): REQUIRED: The resource for which the policy is being requested.
``resource`` is usually specified as a path. For example, a Project
resource is specified as ``projects/{project}``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.pubsub_v1.types.Policy` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'get_iam_policy' not in self._inner_api_calls:
self._inner_api_calls[
'get_iam_policy'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_iam_policy,
default_retry=self._method_configs['GetIamPolicy'].retry,
default_timeout=self._method_configs['GetIamPolicy'].
timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.GetIamPolicyRequest(resource=resource, )
return self._inner_api_calls['get_iam_policy'](
request, retry=retry, timeout=timeout, metadata=metadata)
def test_iam_permissions(self,
resource,
permissions,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Returns permissions that a caller has on the specified resource.
If the resource does not exist, this will return an empty set of
permissions, not a NOT_FOUND error.
Example:
>>> from google.cloud import pubsub_v1
>>>
>>> client = pubsub_v1.PublisherClient()
>>>
>>> resource = client.topic_path('[PROJECT]', '[TOPIC]')
>>>
>>> # TODO: Initialize ``permissions``:
>>> permissions = []
>>>
>>> response = client.test_iam_permissions(resource, permissions)
Args:
resource (str): REQUIRED: The resource for which the policy detail is being requested.
``resource`` is usually specified as a path. For example, a Project
resource is specified as ``projects/{project}``.
permissions (list[str]): The set of permissions to check for the ``resource``. Permissions with
wildcards (such as '*' or 'storage.*') are not allowed. For more
information see
`IAM Overview <https://cloud.google.com/iam/docs/overview#permissions>`_.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.pubsub_v1.types.TestIamPermissionsResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'test_iam_permissions' not in self._inner_api_calls:
self._inner_api_calls[
'test_iam_permissions'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.test_iam_permissions,
default_retry=self._method_configs['TestIamPermissions'].
retry,
default_timeout=self._method_configs['TestIamPermissions'].
timeout,
client_info=self._client_info,
)
request = iam_policy_pb2.TestIamPermissionsRequest(
resource=resource,
permissions=permissions,
)
return self._inner_api_calls['test_iam_permissions'](
request, retry=retry, timeout=timeout, metadata=metadata)
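def _example_publish():
    # Illustrative sketch only (hypothetical key file, project and topic
    # names; requires real credentials and network access). It chains the
    # documented calls above: build a client, derive the topic path,
    # create the topic, then publish a single message.
    client = PublisherClient.from_service_account_file('key.json')
    topic = client.topic_path('my-project', 'my-topic')
    client.create_topic(topic)
    return client.publish(topic, [{'data': b'hello-world'}])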
|
apache-2.0
| 2,759,004,533,117,325,300
| 44.838636
| 175
| 0.574768
| false
|
RegulatoryGenomicsUPF/pyicoteo
|
pyicoteolib/enrichment.py
|
1
|
40209
|
"""
Pyicoteo is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import sys, os
import math
import random
from core import Cluster, Region, InvalidLine, InsufficientData, ConversionNotSupported
from defaults import *
import utils
import bam
from regions import AnnotationGene, AnnotationTranscript, AnnotationExon, RegionWriter, read_gff_file, get_exons, get_introns, gene_slide
import warnings
try:
from shutil import move
except:
from os import rename as move
"""
Differential expression and MA plot visualization module.
"""
def _region_from_dual(self, line):
    """Parse a line from the dual reader into a Region, keeping strand
    information only for stranded analyses; header lines yield None."""
    try:
self.cluster_aux.clear()
self.cluster_aux.read_line(line)
strand = None
if self.stranded_analysis:
strand = self.cluster_aux.strand
ret = Region(self.cluster_aux.name, self.cluster_aux.start, self.cluster_aux.end, name2=self.cluster_aux.name2, strand=strand)
self.cluster_aux.clear()
return ret
except ValueError:
pass #discarding header
def __calc_reg_write(self, region_file, count, calculated_region):
if count > self.region_mintags:
region_file.write(calculated_region.write())
def calculate_region(self):
"""
Calculate a region file using the reads present in the both main files to analyze.
"""
self.logger.info('Generating regions...')
self.sorted_region_path = '%s/calcregion_%s.bed'%(self._output_dir(), os.path.basename(self.current_output_path))
region_file = open(self.sorted_region_path, 'wb')
if self.region_magic:
regwriter = RegionWriter(self.gff_file, region_file, self.region_magic, no_sort=self.no_sort, logger=self.logger, write_as=BED, galaxy_workarounds=self.galaxy_workarounds)
regwriter.write_regions()
dual_reader = utils.DualSortedReader(self.current_experiment_path, self.current_control_path, self.experiment_format, self.logger)
if self.stranded_analysis:
calculate_region_stranded(self, dual_reader, region_file)
else:
calculate_region_notstranded(self, dual_reader, region_file)
region_file.flush()
def __cr_append(self, regions, region):
regions.append(region)
def calculate_region_notstranded(self, dual_reader, region_file):
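    # Editor's note (assumed reading of the sweep below): each read-derived
    # region is first extended by `proximity` bp; consecutive extended regions
    # that overlap are joined, and the extension is subtracted back before the
    # merged region is written out.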
calculated_region = Region()
readcount = 1
for line in dual_reader:
if not calculated_region: #first region only
calculated_region = _region_from_dual(self, line)
calculated_region.end += self.proximity
else:
new_region = _region_from_dual(self, line)
new_region.end += self.proximity
if calculated_region.overlap(new_region):
calculated_region.join(new_region)
readcount += 1
else:
calculated_region.end -= self.proximity
__calc_reg_write(self, region_file, readcount, calculated_region)
calculated_region = new_region.copy()
readcount = 1
if calculated_region:
calculated_region.end -= self.proximity
__calc_reg_write(self, region_file, readcount, calculated_region)
def calculate_region_stranded(self, dual_reader, region_file):
temp_region_file = open(self.sorted_region_path, 'wb')
region_plus = Region()
region_minus = Region()
regions = []
numreads_plus = 1
numreads_minus = 1
dual_reader = utils.DualSortedReader(self.current_experiment_path, self.current_control_path, self.experiment_format, self.logger)
for line in dual_reader:
new_region = _region_from_dual(self, line)
new_region.end += self.proximity
        if not region_plus and new_region.strand == PLUS_STRAND:
            region_plus = new_region.copy()
        elif not region_minus and new_region.strand == MINUS_STRAND:
            region_minus = new_region.copy()
else:
if region_plus.overlap(new_region) and region_plus.strand == new_region.strand:
region_plus.join(new_region)
numreads_plus += 1
elif region_minus.overlap(new_region) and region_minus.strand == new_region.strand:
region_minus.join(new_region)
numreads_minus += 1
else:
if new_region.strand == region_plus.strand:
region_plus.end -= self.proximity
self.__calc_reg_write(region_file, numreads_plus, region_plus)
region_plus = new_region.copy()
numreads_plus = 1
else:
region_minus.end -= self.proximity
self.__calc_reg_write(region_file, numreads_minus, region_minus)
region_minus = new_region.copy()
numreads_minus = 1
if region_plus:
region_plus.end -= self.proximity
regions.append(region_plus)
if region_minus:
region_minus.end -= self.proximity
regions.append(region_minus)
regions.sort(key=lambda x:(x.name, x.start, x.end, x.strand))
for region in regions:
region_file.write(region.write())
def get_zscore(x, mean, sd):
if sd > 0:
return float(x-mean)/sd
else:
        return 0 #these points are weird anyway
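# Worked example (editor's note): get_zscore(7.0, 5.0, 1.0) == 2.0, i.e. the
# value sits two standard deviations above the window mean.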
def read_interesting_regions(self, file_path):
regs = []
try:
regs_file = open(file_path, 'r')
for line in regs_file:
regs.append(line.strip())
except IOError as ioerror:
self.logger.warning("Interesting regions file not found")
return regs # memory inefficient if there's a large number of interesting regions
def plot_enrichment(self, file_path):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
try:
if self.postscript:
import matplotlib
matplotlib.use("PS")
from matplotlib.pyplot import *
from matplotlib import rcParams
rcParams.update({'font.size': 22})
rcParams['legend.fontsize'] = 14
#decide labels
if self.label1:
label_main = self.label1
else:
if self.real_control_path and self.real_experiment_path:
label_main = '%s VS %s'%(os.path.basename(self.real_experiment_path), os.path.basename(self.real_control_path))
else:
label_main = "A VS B"
if self.label2:
label_control = self.label2
else:
if self.replica_path:
label_control = '%s(A) VS %s(A)'%(os.path.basename(self.real_experiment_path), os.path.basename(self.replica_path))
else:
label_control = 'Background distribution'
#self.logger.info("Interesting regions path: %s" % (self.interesting_regions))
interesting_regs = []
if self.interesting_regions:
self.logger.info("Reading interesting regions...")
interesting_regs = read_interesting_regions(self, self.interesting_regions)
#self.logger.info("Interesting regions: %s" % (interesting_regs))
#self.logger.info("Plot path: %s" % (file_path))
interesting_A = []
interesting_M = []
#self.logger.info("disable_significant: %s" % (self.disable_significant_color))
A = []
A_prime = []
M = []
M_significant = []
A_significant = []
M_prime = []
A_medians = []
points = []
minus_points = []
all_points = []
figure(figsize=(14,22))
biggest_A = -sys.maxint #for drawing
smallest_A = sys.maxint #for drawing
biggest_M = 0 #for drawing
self.logger.info("Loading table...")
for line in open(file_path):
sline = line.split()
try:
enrich = dict(zip(enrichment_keys, sline))
# WARNING: for slide inter and slide intra: name2 = 'start:end' (no gene_id, FIXME?)
name2 = enrich['name2'].split(':')
gene_id = name2[0]
if len(name2) >= 2:
transcript_id = name2[1] # consider transcript_id? (exons)
else:
transcript_id = None
if gene_id in interesting_regs or transcript_id in interesting_regs:
interesting_M.append(float(enrich["M"]))
interesting_A.append(float(enrich["A"]))
biggest_A = max(biggest_A, float(enrich["A"]))
smallest_A = min(smallest_A, float(enrich["A"]))
biggest_M = max(biggest_M, abs(float(enrich["M"])))
biggest_A = max(biggest_A, float(enrich["A_prime"]))
smallest_A = min(smallest_A, float(enrich["A_prime"]))
biggest_M = max(biggest_M, abs(float(enrich["M_prime"])))
positive_point = self.zscore*float(enrich["sd"])+float(enrich["mean"])
negative_point = -self.zscore*float(enrich["sd"])+float(enrich["mean"])
A_median = float(enrich["A_median"])
all_points.append((A_median, positive_point, negative_point))
if abs(float(enrich["zscore"])) < self.zscore:
M.append(float(enrich["M"]))
A.append(float(enrich["A"]))
else:
M_significant.append(float(enrich["M"]))
A_significant.append(float(enrich["A"]))
M_prime.append(float(enrich["M_prime"]))
A_prime.append(float(enrich["A_prime"]))
except ValueError:
pass #to skip the header
all_points.sort(key= lambda x:x[0])
            for t in all_points:
                A_medians.append(t[0])
                points.append(t[1])
                minus_points.append(t[2])
if points:
margin = 1.1
A_medians.append(biggest_A*margin)
points.append(points[-1])
minus_points.append(minus_points[-1])
A_medians.insert(0, smallest_A)
points.insert(0, points[0])
minus_points.insert(0, minus_points[0])
self.logger.info("Plotting points...")
#Background plot
subplot(211, axisbg="lightyellow")
xlabel('Average', fontsize=30)
ylabel('Log2 ratio', fontsize=30)
axis([smallest_A*margin, biggest_A*margin, -biggest_M*margin, biggest_M*margin])
plot(A_prime, M_prime, '.', label=label_control, color = '#666666')
plot(A_medians, points, 'r--', label="Z-score (%s)"%self.zscore)
plot(A_medians, minus_points, 'r--')
axhline(0, linestyle='--', color="grey", alpha=0.75)
leg = legend(fancybox=True, scatterpoints=1, numpoints=1, loc=2, ncol=4, mode="expand")
leg.get_frame().set_alpha(0.5)
#Experiment plot
subplot(212, axisbg="lightyellow")
axis([smallest_A*margin, biggest_A*margin, -biggest_M*margin, biggest_M*margin])
plot(A, M, 'k.', label=label_main)
if self.disable_significant_color:
significant_marker = 'ko'
else:
significant_marker = 'ro'
plot(A_significant, M_significant, significant_marker, label="%s (significant)"%label_main)
plot(A_medians, points, 'r--', label="Z-score (%s)"%self.zscore)
plot(A_medians, minus_points, 'r--')
if self.interesting_regions:
interesting_label = label_main + ' (interesting)'
plot(interesting_A, interesting_M, 'H', label=interesting_label, color='#00EE00') # plotting "interesting" regions
axhline(0, linestyle='--', color="grey", alpha=0.75)
xlabel('Average', fontsize=30)
ylabel('Log2 ratio', fontsize=30)
leg2 = legend(fancybox=True, scatterpoints=1, numpoints=1, loc=2, ncol=4)
leg2.get_frame().set_alpha(0.7)
self._save_figure("enrichment_MA", width=500, height=2800)
else:
self.logger.warning("Nothing to plot.")
except ImportError:
if self.debug:
raise
__matplotlibwarn(self)
def __matplotlibwarn(self):
#FIXME move to utils.py or plotting module
self.logger.warning('Pyicos can not find an installation of matplotlib, so no plot will be drawn. If you want to get a plot with the correlation values, install the matplotlib library.')
def __calc_M(signal_a, signal_b):
return math.log(float(signal_a)/float(signal_b), 2)
def __calc_A(signal_a, signal_b):
return (math.log(float(signal_a), 2)+math.log(float(signal_b), 2))/2
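# Worked example (editor's note): signals 8 and 2 give M = log2(8/2) = 2 (a
# four-fold enrichment) at average log-intensity A = (3 + 1)/2 = 2.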
def _calculate_MA(self, region_path, read_counts, factor = 1, replica_factor = 1, file_a_reader=None, file_b_reader=None, replica_reader=None):
tags_a = []
tags_b = []
numreads_background_1 = 0
numreads_background_2 = 0
total_reads_background_1 = 0
total_reads_background_2 = 0
self.logger.debug("Inside _calculate_MA")
self.regions_analyzed_count = 0
    enrichment_result = [] #This will hold the name, start and end of each region, plus its A, M, A' and M' values
if NOWRITE not in self.operations:
out_file = open(self.current_output_path, 'wb')
for region_line in open(region_path):
sline = region_line.split()
region_of_interest = self._region_from_sline(sline)
if region_of_interest:
region_a = None
replica = None
replica_tags = None
signal_a = -1
signal_b = -1
signal_background_1 = -1
signal_background_2 = -1
swap1 = Region()
swap2 = Region()
if read_counts:
signal_a = float(sline[6])
signal_b = float(sline[7])*factor
signal_background_1 = float(sline[8])
signal_background_2 = float(sline[9])*replica_factor
if CHECK_REPLICAS in self.operations:
self.experiment_values.append(signal_background_1)
self.replica_values.append(signal_background_2)
else:
self.logger.debug("Reading tags for %s ..."%region_of_interest)
if self.experiment_format == BAM:
tags_a = len(file_a_reader.get_overlaping_clusters(region_of_interest, overlap=self.overlap))
tags_b = len(file_b_reader.get_overlaping_clusters(region_of_interest, overlap=self.overlap))
else:
tags_a = file_a_reader.get_overlaping_counts(region_of_interest, overlap=self.overlap)
tags_b = file_b_reader.get_overlaping_counts(region_of_interest, overlap=self.overlap)
if self.use_replica:
if self.experiment_format == BAM:
replica_tags = len(replica_reader.get_overlaping_clusters(region_of_interest, overlap=self.overlap))
else:
replica_tags = replica_reader.get_overlaping_counts(region_of_interest, overlap=self.overlap)
self.logger.debug("... done. tags_a: %s tags_b: %s"%(tags_a, tags_b))
#if we are using pseudocounts, use the union, use the intersection otherwise
if (self.pseudocount and (tags_a or tags_b)) or (not self.pseudocount and tags_a and tags_b):
signal_a = region_of_interest.normalized_counts(self.len_norm, self.n_norm, self.total_regions, self.pseudocount, factor, self.total_reads_a, tags_a)
signal_b = region_of_interest.normalized_counts(self.len_norm, self.n_norm, self.total_regions, self.pseudocount, factor, self.total_reads_b, tags_b)
self.already_norm = True
if not self.counts_file:
if (self.pseudocount and (tags_a or tags_b)) or (not self.pseudocount and tags_a and tags_b):
if self.use_replica:
replica = region_of_interest.copy()
#replica.add_tags(replica_tags)
numreads_background_1 = tags_a
numreads_background_2 = replica_tags
total_reads_background_1 = self.total_reads_a
total_reads_background_2 = self.total_reads_replica
signal_background_1 = signal_a
signal_background_2 = region_of_interest.normalized_counts(self.len_norm, self.n_norm, self.total_regions, self.pseudocount,
replica_factor, self.total_reads_replica, replica_tags)
else:
numreads_background_1 = 0
numreads_background_2 = 0
for i in range(0, tags_a+tags_b):
if random.uniform(0,2) > 1:
numreads_background_1 += 1
else:
numreads_background_2 += 1
total_reads_background_1 = total_reads_background_2 = self.average_total_reads
signal_background_1 = region_of_interest.normalized_counts(self.len_norm, self.n_norm, self.total_regions, self.pseudocount,
replica_factor, self.average_total_reads, numreads_background_1)
signal_background_2 = region_of_interest.normalized_counts(self.len_norm, self.n_norm, self.total_regions, self.pseudocount,
replica_factor, self.average_total_reads, numreads_background_2)
            #if there is no data in the replica or in the swap and we are not using pseudocounts, don't write the data
if signal_a > 0 and signal_b > 0 and signal_background_1 > 0 and signal_background_2 > 0 or self.use_MA:
if self.use_MA and not self.already_norm:
A = float(sline[10])
M = float(sline[11])
A_prime = float(sline[16])
M_prime = float(sline[17])
else:
                    if not self.already_norm: #TODO refactor
if self.len_norm: #read per kilobase in region
signal_a = 1e3*(float(signal_a)/len(region_of_interest))
signal_b = 1e3*(float(signal_b)/len(region_of_interest))
signal_background_1 = 1e3*(float(signal_background_1)/len(region_of_interest))
signal_background_2 = 1e3*(float(signal_background_2)/len(region_of_interest))
if self.n_norm: #per million reads in the sample
signal_a = 1e6*(float(signal_a)/self.total_reads_a)
signal_b = 1e6*(float(signal_b)/self.total_reads_b)
if self.use_replica:
signal_background_1 = signal_a
signal_background_2 = 1e6*(float(signal_background_2)/self.total_reads_replica)
else:
signal_background_1 = 1e6*(float(signal_background_1)/self.average_total_reads)
signal_background_2 = 1e6*(float(signal_background_2)/self.average_total_reads)
A = __calc_A(signal_a, signal_b)
M = __calc_M(signal_a, signal_b)
A_prime = __calc_A(signal_background_1, signal_background_2)
M_prime = __calc_M(signal_background_1, signal_background_2)
if CHECK_REPLICAS in self.operations:
self.experiment_values.append(signal_background_1)
self.replica_values.append(signal_background_2)
if NOWRITE not in self.operations:
out_file.write("%s\n"%("\t".join([region_of_interest.write().rstrip("\n"), str(signal_a), str(signal_b), str(signal_background_1), str(signal_background_2), str(A), str(M), str(self.total_reads_a), str(self.total_reads_b), str(tags_a), str(tags_b), str(A_prime), str(M_prime), str(total_reads_background_1), str(total_reads_background_2), str(numreads_background_1), str(numreads_background_2)])))
self.regions_analyzed_count += 1
self.logger.debug("LEAVING _calculate_MA")
if NOWRITE in self.operations:
return ""
else:
out_file.flush()
out_file.close()
# Outputting to HTML (if specified)
if self.html_output is not None:
self.logger.info("Generating HTML")
try:
from jinja2 import Environment, PackageLoader, Markup
except:
self.logger.error("Could not find the jinja2 library")
return out_file.name
loadr = PackageLoader('pyicoteolib', 'templates')
env = Environment(loader=loadr)
template = env.get_template('enrich_html.html')
def jinja_read_file(filename):
f = open(filename, 'r')
#for line in f:
# print line
txt = ''.join(f.readlines())
f.close()
return txt
env.globals['jinja_read_file'] = jinja_read_file
if self.galaxy_workarounds: # Galaxy changes the working directory when outputting multiple files
parent_dir = "./"
else:
parent_dir = os.sep.join(out_file.name.split(os.sep)[0:-1]) + "/"
plot_path = parent_dir + "enrichment_MA_" + out_file.name.split(os.sep)[-1] + ".png"
bed_path = parent_dir + out_file.name.split(os.sep)[-1]
html_file = open(self.html_output, 'w')
html_file.write(template.render({'page_title': 'Enrichment results', 'results_output': jinja_read_file(out_file.name), 'plot_path': plot_path, 'bed_path': bed_path}))
html_file.flush()
html_file.close()
return out_file.name
def _calculate_total_lengths(self):
msg = "Calculating enrichment in regions"
if self.counts_file:
self.sorted_region_path = self.counts_file
if (not self.total_reads_a or not self.total_reads_b or (not self.total_reads_replica and self.use_replica)) and not self.use_MA:
self.logger.info("... counting from counts file...")
self.total_reads_a = 0
self.total_reads_b = 0
if self.total_reads_replica:
self.total_reads_replica = 0
else:
self.total_reads_replica = 1
for line in open(self.counts_file):
try:
enrich = dict(zip(enrichment_keys, line.split()))
self.total_reads_a += float(enrich["signal_a"])
self.total_reads_b += float(enrich["signal_b"])
if self.use_replica:
self.total_reads_replica += float(enrich["signal_prime_2"])
except ValueError:
self.logger.debug("(Counting) skip header...")
else:
self.logger.info("... counting number of lines in files...")
if not self.total_reads_a:
if self.experiment_format == BAM:
self.total_reads_a = bam.size(self.current_experiment_path)
else:
self.total_reads_a = sum(1 for line in utils.open_file(self.current_experiment_path, self.experiment_format, logger=self.logger))
if not self.total_reads_b:
if self.experiment_format == BAM:
self.total_reads_b = bam.size(self.current_control_path)
else:
self.total_reads_b = sum(1 for line in utils.open_file(self.current_control_path, self.control_format, logger=self.logger))
if self.use_replica and not self.total_reads_replica:
if self.experiment_format == BAM:
self.total_reads_replica = bam.size(self.replica_path)
else:
self.total_reads_replica = sum(1 for line in utils.open_file(self.replica_path, self.experiment_format, logger=self.logger))
self.logger.debug("Number lines in experiment A: %s Experiment B: %s"%(self.total_reads_a, self.total_reads_b))
if self.use_replica:
msg = "%s using replicas..."%msg
else:
msg = "%s using swap..."%msg
self.logger.info(msg)
self.average_total_reads = (self.total_reads_a+self.total_reads_b)/2
def enrichment(self):
file_a_reader = file_b_reader = replica_reader = None
self.use_replica = (bool(self.replica_path) or (bool(self.counts_file) and self.use_replica_flag))
self.logger.debug("Use replica: %s"%self.use_replica)
if not USE_MA in self.operations:
_calculate_total_lengths(self)
if not self.counts_file:
file_a_reader = utils.read_fetcher(self.current_experiment_path, self.experiment_format, cached=self.cached, logger=self.logger, use_samtools=self.use_samtools, access_sequential=self.access_sequential, only_counts=True)
file_b_reader = utils.read_fetcher(self.current_control_path, self.experiment_format, cached=self.cached, logger=self.logger, use_samtools=self.use_samtools, access_sequential=self.access_sequential, only_counts=True)
if self.use_replica:
replica_reader = utils.read_fetcher(self.current_replica_path, self.experiment_format, cached=self.cached, logger=self.logger, use_samtools=self.use_samtools, access_sequential=self.access_sequential, only_counts=True)
if self.sorted_region_path:
self.logger.info('Using region file %s (%s)'%(self.region_path, self.region_format))
else:
calculate_region(self) #create region file semi automatically
self.total_regions = sum(1 for line in open(self.sorted_region_path))
self.logger.info("... analyzing regions, calculating normalized counts, A / M and replica or swap...")
self.already_norm = False
if self.use_MA:
ma_path = self.counts_file
else:
ma_path = self.sorted_region_path
out_path = _calculate_MA(self, ma_path, bool(self.counts_file), 1, 1, file_a_reader, file_b_reader, replica_reader)
self.already_norm = True
self.logger.debug("Already normalized: %s"%self.already_norm)
if self.tmm_norm:
if CHECK_REPLICAS in self.operations:
self.experiment_values = []
self.replica_values = []
self.logger.info("TMM Normalizing...")
tmm_factor = calc_tmm_factor(self, out_path, self.regions_analyzed_count, False)
replica_tmm_factor = 1
if self.use_replica:
replica_tmm_factor = calc_tmm_factor(self, out_path, self.regions_analyzed_count, True)
#move output file to old output
#use as input
old_output = '%s/notnormalized_%s'%(self._current_directory(), os.path.basename(self.current_output_path))
move(os.path.abspath(self.current_output_path), old_output)
out_path = _calculate_MA(self, old_output, True, tmm_factor, replica_tmm_factor, True) #recalculate with the new factor, using the counts again
if self.quant_norm:
self.logger.info("Full quantile normalization...")
signal_a = []
signal_prime_1 = []
enrich = []
for line in open(out_path):
sline = line.split()
enrich_line = dict(zip(enrichment_keys, sline))
enrich.append(enrich_line)
signal_a.append(float(enrich_line['signal_a']))
signal_prime_1.append(float(enrich_line['signal_prime_1']))
#full quantile normalization
signal_a.sort()
enrich.sort(key=lambda x:float(x['signal_b']))
quant_counts = open('%s/quantcounts_%s'%(self._current_directory(), os.path.basename(self.current_output_path)), 'w')
for i in range(len(enrich)):
enrich[i]['signal_b'] = signal_a[i]
self.logger.info("Full quantile normalization replica...")
#full quantile normalization of the replica
signal_prime_1.sort()
enrich.sort(key=lambda x:float(x['signal_prime_2']))
for i in range(len(enrich)):
enrich[i]['signal_prime_2'] = signal_prime_1[i]
quant_counts.write("%s\n"%"\t".join(str(enrich[i][key]) for key in enrichment_keys[:20])) #write the lines
quant_counts.flush()
out_path = _calculate_MA(self, quant_counts.name, True, 1, 1, True) #recalculate with the new factor, using the counts again
self._manage_temp_file(quant_counts.name)
self.logger.info("%s regions analyzed."%self.regions_analyzed_count)
if not NOWRITE in self.operations:
self.logger.info("Enrichment result saved to %s"%self.current_output_path)
if CHECK_REPLICAS in self.operations:
check_replica(self)
return out_path
def _sub_tmm(counts_a, counts_b, reads_a, reads_b):
return (counts_a-reads_a)/(counts_a*reads_a) + (counts_b-reads_b)/(counts_b*reads_b)
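# Editor's note (assumed intent): the value above serves as the weight w of a
# region in calc_tmm_factor below, so the TMM factor works out to
# 2**(sum(w*M)/sum(w)) over the regions that survive the A- and M-trimming.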
def calc_tmm_factor(self, file_counts, total_regions, replica):
if replica:
signal_1 = "signal_prime_1"
signal_2 = "signal_prime_2"
M = "M_prime"
reads_2 = self.total_reads_replica
else:
signal_1 = "signal_a"
signal_2 = "signal_b"
M = "M"
reads_2 = self.total_reads_b
values_list = []
#read the file inside the values_list
for line in open(file_counts):
sline = line.split()
values_list.append(dict(zip(enrichment_keys, sline)))
a_trim_number = int(round(total_regions*self.a_trim))
#discard the bad A
self.logger.debug("Removing the worst A (%s regions, %s percent)"%(a_trim_number, self.a_trim*100))
values_list.sort(key=lambda x:float(x["A"])) #sort by A
for i in range (0, a_trim_number):
values_list.pop(0)
values_list.sort(key=lambda x:float(x[M])) #sort by M
m_trim_number = int(round(total_regions*(self.m_trim/2))) #this number is half the value of the flag, because we will trim half below, and half over
#remove on the left
for i in range(0, m_trim_number):
values_list.pop(0)
#remove on the right
for i in range(0, m_trim_number):
values_list.pop(-1)
    #now calculate the normalization factor ('arriba'/'abajo' below are Spanish for numerator/denominator)
arriba = 0
abajo = 0
for value in values_list:
w = _sub_tmm(float(value[signal_1]), float(value[signal_2]), self.total_reads_a, reads_2)
arriba += w*float(value[M])
abajo += w
try:
factor = 2**(arriba/abajo)
except ZeroDivisionError:
self.logger.warning("Division by zero, TMM factor could not be calculated.")
factor = 1
if replica:
self.logger.info("Replica TMM Normalization Factor: %s"%factor)
else:
self.logger.info("TMM Normalization Factor: %s"%factor)
return factor
def __load_enrichment_result(values_path):
ret = []
for line in open(values_path):
sline = line.split()
try:
float(sline[1])
ret.append(dict(zip(enrichment_keys, sline)))
except ValueError:
pass
return ret
def calculate_zscore(self, values_path):
num_regions = sum(1 for line in open(values_path))
bin_size = int(self.binsize*num_regions)
if bin_size < 50:
self.logger.warning("The bin size results in a sliding window smaller than 50, adjusting window to 50 in order to get statistically meaningful results.")
bin_size = 50
bin_step = max(1, int(round(self.bin_step*bin_size)))
self.logger.info("Enrichment window calculation using a sliding window size of %s, sliding with a step of %s"%(bin_size, bin_step))
self.logger.info("... calculating zscore...")
enrichment_result = __load_enrichment_result(values_path)
enrichment_result.sort(key= lambda x:(float(x["A_prime"])))
self.logger.debug("Number of loaded counts: %s"%len(enrichment_result))
self.points = []
#get the standard deviations
for i in range(0, num_regions-bin_size+bin_step, bin_step):
#get the slice
if i+bin_size < num_regions:
result_chunk = enrichment_result[i:i+bin_size]
else:
result_chunk = enrichment_result[i:] #last chunk
#retrieve the values
mean_acum = 0
a_acum = 0
Ms_replica = []
for entry in result_chunk:
mean_acum += float(entry["M_prime"])
a_acum += float(entry["A_prime"])
Ms_replica.append(float(entry["M_prime"]))
#add them to the points of mean and sd
mean = mean_acum/len(result_chunk)
sd = math.sqrt((sum((x - mean)**2 for x in Ms_replica))/len(Ms_replica))
#the A median
A_median = a_acum / len(result_chunk)
        self.points.append([A_median, mean, sd]) #the A assigned to the window, plus the mean and the standard deviation
#self.logger.debug("Window of %s length, with A median: %s mean: %s sd: %s"%(len(result_chunk), self.points[-1][0], self.points[-1][1], self.points[-1][2], len(self.points)))
#update z scores
for entry in enrichment_result:
entry["A_median"] = 0
entry["mean"] = 0
entry["sd"] = 0
entry["zscore"] = 0
closest_A = sys.maxint
sd_position = 0
for i in range(0, len(self.points)):
new_A = self.points[i][0]
if new_A != closest_A: #skip repeated points
if abs(closest_A - float(entry["A"])) >= abs(new_A - float(entry["A"])):
closest_A = new_A
sd_position = i
else:
break #already found, no need to go further since the points are ordered
entry["A_median"] = closest_A
        if self.points: #only calculate if there were windows...
__sub_zscore(self.sdfold, entry, self.points[sd_position])
if not self.points: # ... otherwise give a warning
self.logger.warning("Insufficient number of regions analyzed (%s), z-score values could not be calculated"%num_regions)
enrichment_result.sort(key=lambda x:(x["name"], int(x["start"]), int(x["end"])))
old_file_path = '%s/before_zscore_%s'%(self._current_directory(), os.path.basename(values_path)) #create path for the outdated file
move(os.path.abspath(values_path), old_file_path) #move the file
new_file = file(values_path, 'w') #open a new file in the now empty file space
if not self.skip_header:
new_file.write('\t'.join(enrichment_keys))
new_file.write('\n')
for entry in enrichment_result:
new_file.write("\t".join(str(entry[key]) for key in enrichment_keys)+"\n")
self._manage_temp_file(old_file_path)
return values_path
def __sub_zscore(sdfold, entry, point):
entry["mean"] = str(point[1])
entry["sd"] = str(point[2])
entry["zscore"] = str(get_zscore(float(entry["M"]), float(entry["mean"]), sdfold*float(entry["sd"])))
def check_replica(self):
#discard everything below the flag
new_experiment = []
new_replica = []
min_value = sys.maxint
max_value = -sys.maxint
for i in range(len(self.replica_values)):
if self.experiment_values[i] > self.count_filter and self.replica_values[i] > self.count_filter:
new_experiment.append(math.log(self.experiment_values[i], 2))
new_replica.append(math.log(self.replica_values[i], 2))
min_value = min(min_value, math.log(self.experiment_values[i], 2), math.log(self.replica_values[i], 2))
max_value = max(max_value, math.log(self.experiment_values[i], 2), math.log(self.replica_values[i], 2))
#print self.replica_values
self.experiment_values = new_experiment
self.replica_values = new_replica
try:
if self.postscript:
import matplotlib
matplotlib.use("PS")
from matplotlib.pyplot import plot, show, xlabel, ylabel, axhline, axis, clf, text, title, xlim, ylim
except:
__matplotlibwarn(self)
return 0
clf()
r_squared = utils.pearson(self.experiment_values, self.replica_values)**2
text(min_value+abs(max_value)*0.1, max_value-abs(max_value)*0.2, r'Pearson $R^2$= %s'%round(r_squared, 3), fontsize=18, bbox={'facecolor':'yellow', 'alpha':0.5, 'pad':10})
xlabel("log2(%s)"%self.experiment_label, fontsize=18)
ylabel("log2(%s)"%self.replica_label, fontsize=18)
xlim(min_value, max_value)
ylim(min_value, max_value)
title(self.title_label, fontsize=24)
plot(self.experiment_values, self.replica_values, '.')
self._save_figure("check_replica")
def check_replica_correlation(self):
"No usado, de momento"
min_tags = 20
experiment_reader = utils.read_fetcher(self.current_experiment_path, self.experiment_format, cached=self.cached, logger=self.logger, use_samtools=self.use_samtools, access_sequential=self.access_sequential)
replica_reader = utils.read_fetcher(self.current_replica_path, self.experiment_format, cached=self.cached, logger=self.logger, use_samtools=self.use_samtools, access_sequential=self.access_sequential)
correlations_acum = 0
num_correlations = 0
for region_line in open(self.region_path):
sline = region_line.split()
region_experiment = self._region_from_sline(sline)
region_replica = region_experiment.copy()
tags_experiment = experiment_reader.get_overlaping_clusters(region_experiment, overlap=1)
tags_replica = replica_reader.get_overlaping_clusters(region_experiment, overlap=1)
count_experiment = len(tags_experiment)
count_replica = len(tags_replica)
correlations = []
if count_experiment+count_replica > min_tags:
region_experiment.add_tags(tags_experiment, clusterize=True)
region_replica.add_tags(tags_replica, clusterize=True)
num_correlations += 1
correlation = utils.pearson(region_experiment.get_array(), region_replica.get_array())
correlations_acum += max(0, correlation)
correlations.append(correlation)
print correlations_acum/num_correlations
try:
if self.postscript:
import matplotlib
matplotlib.use("PS")
from matplotlib.pyplot import plot, boxplot, show, legend, figure, xlabel, ylabel, subplot, axhline, axis
except:
__matplotlibwarn(self)
return 0
print correlations
boxplot(correlations)
self._save_figure("check_replica")
|
gpl-3.0
| 5,883,346,573,406,230,000
| 46.64218
| 418
| 0.583999
| false
|
excelly/xpy-ml
|
sdss_iii/proc_transform_data.py
|
1
|
4304
|
from ex import *
from ex.ioo.FITS import FITS
import multiprocessing as mp
import sdss.utils as utils
import sdss_iii.settings as settings
def usage():
print('''
transform the data into standard form
python proc_transform_data.py --input_files={input_files} --npixel=[500] --nproc=[1]
''')
sys.exit(1)
def Filter(sf, vf):
'''filter out bad objects
'''
n, dim = vf['spectrum'].shape
#### filter by bad pixels
sd = float32(1/np.sqrt(np.maximum(vf['invvar'], 1e-6)))
filt_pix = (sd > settings.bad_pixel_std_thresh).sum(1) < dim*settings.bad_pixel_num_thresh
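    # Editor's note (assumed reading): an object is kept when its count of
    # high-noise pixels (per-pixel sd above the threshold) stays below the
    # allowed fraction of the spectrum length.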
#### filter by s2n
# stars
filt_star = sf['spec_cln'] == 1
n_star = filt_star.sum()
filt_star = reduce(AND, [filt_pix, filt_star, sf['s2n'] >= 3])
# galaxy
filt_gla = sf['spec_cln'] == 2
n_gla = filt_gla.sum()
filt_gla = reduce(AND, [filt_pix, filt_gla, sf['s2n'] >= 10])
# qso
filt_qso = sf['spec_cln'] == 3
n_qso = filt_qso.sum()
filt_qso = reduce(AND, [filt_pix, filt_qso, sf['s2n'] >= 10])
log.info('''
Selected
%d / %d stars
%d / %d galaxies
%d / %d quasars
''' % (filt_star.sum(), n_star,
filt_gla.sum(), n_gla,
filt_qso.sum(), n_qso))
return reduce(OR, [filt_star, filt_gla, filt_qso])
def ResampleSpectrum(y_np):
y, npixel = y_np
return utils.SplineResample(y, npixel)
def main(input_files, npixel=500, nproc=1):
input_files = ExpandWildcard(input_files)
MakeDir('./compact')
log.info("Transforming {0} SDSS-III files using {1} processes. Output=./compact/".format(len(input_files), nproc))
pool = mp.Pool(nproc)
for input_file in input_files:
output_file = "./compact/{0}.pkl".format(
SplitFilename(input_file)[0])
if os.path.exists(output_file):
log.info('Already processed {0}'.format(input_file))
continue
log.info("Processing %s -> %s" % (input_file,output_file))
fits = FITS(input_file)
vf = {'spectrum': FixEndian(fits.HDUs[0].data),
'invvar': FixEndian(fits.HDUs[4].data)}
log10_wl = FixEndian(fits.HDUs[3].data)
sf = dict([(name, FixEndian(fits.HDUs[1].data.field(name)))
for name in fits.HDUs[1].data.names])
del sf['length']
sf['mag'] = FixEndian(fits.HDUs[2].data)
sf['spec_cln'] = arr(EncodeList(
[c.strip().lower() for c in sf['class']],
settings.spec_cln_code.keys(),
settings.spec_cln_code.values()))
sf['PMF'] = utils.PMF_S2N(sf['plate'],sf['mjd'],sf['fiber'])
sf['stamp'] = zeros(len(vf['spectrum']), dtype = np.int64)
sf['stamp'][:] = fits.HDUs[1].header['stamp']
log.info("The following scalar features found: \n{0}".format(
sf.keys()))
filt = Filter(sf, vf)
for key in sf.keys():
sf[key] = sf[key][filt]
for key in vf.keys():
vf[key] = vf[key][filt]
log.info("%d / %d objects left after filtering" % (
filt.sum(), filt.size))
log.info('Resampling %d spectra %d -> %d...'%(
len(vf['spectrum']), vf['spectrum'].shape[1], npixel))
jobs = [(spec, npixel) for spec in vf['spectrum']]
spectrum = pool.map(ResampleSpectrum, jobs)
log.info('Resampling %d invvar...'%len(vf['invvar']))
jobs = [(iv, npixel) for iv in vf['invvar']]
invvar = pool.map(ResampleSpectrum, jobs)
log10_wl = linspace(log10_wl.min(), log10_wl.max(), npixel)
# from ex.plott import *
# h = figure();
# subplot(h,211); plot(vf['spectrum'][0])
# subplot(h,212); plot(spectrum[0])
# show()
vf['spectrum'] = spectrum
vf['invvar'] = invvar
log.info('Saving %s...'%output_file)
SavePickle(output_file, {'SF': sf, 'VF': vf,
'log10_wl': log10_wl})
fits.Close()
if __name__ == '__main__':
InitLog()
opts = CmdArgs(sys.argv[1:],
['nproc=','input_files=','npixel='],
usage)
input_files = opts.get('--input_files')
nproc = int(opts.get('--nproc', 1))
npixel = int(opts.get('--npixel', 500))
main(input_files, npixel, nproc)
|
apache-2.0
| -8,651,753,374,808,732,000
| 31.360902
| 118
| 0.549024
| false
|
mgagne/nova
|
nova/tests/unit/api/openstack/compute/contrib/test_quota_classes.py
|
1
|
6260
|
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import webob
from nova.api.openstack.compute.contrib import quota_classes
from nova.api.openstack.compute import plugins
from nova.api.openstack.compute.plugins.v3 import quota_classes \
as quota_classes_v21
from nova.api.openstack import extensions
from nova import test
from nova.tests.unit.api.openstack import fakes
def quota_set(class_name):
return {'quota_class_set': {'id': class_name, 'metadata_items': 128,
'ram': 51200, 'floating_ips': 10,
'fixed_ips': -1, 'instances': 10,
'injected_files': 5, 'cores': 20,
'injected_file_content_bytes': 10240,
'security_groups': 10,
'security_group_rules': 20, 'key_pairs': 100,
'injected_file_path_bytes': 255}}
class QuotaClassSetsTestV21(test.TestCase):
def setUp(self):
super(QuotaClassSetsTestV21, self).setUp()
self.req_admin = fakes.HTTPRequest.blank('', use_admin_context=True)
self.req = fakes.HTTPRequest.blank('')
self._setup()
def _setup(self):
ext_info = plugins.LoadedExtensionInfo()
self.controller = quota_classes_v21.QuotaClassSetsController(
extension_info=ext_info)
def test_format_quota_set(self):
raw_quota_set = {
'instances': 10,
'cores': 20,
'ram': 51200,
'floating_ips': 10,
'fixed_ips': -1,
'metadata_items': 128,
'injected_files': 5,
'injected_file_path_bytes': 255,
'injected_file_content_bytes': 10240,
'security_groups': 10,
'security_group_rules': 20,
'key_pairs': 100,
}
quota_set = self.controller._format_quota_set('test_class',
raw_quota_set)
qs = quota_set['quota_class_set']
self.assertEqual(qs['id'], 'test_class')
self.assertEqual(qs['instances'], 10)
self.assertEqual(qs['cores'], 20)
self.assertEqual(qs['ram'], 51200)
self.assertEqual(qs['floating_ips'], 10)
self.assertEqual(qs['fixed_ips'], -1)
self.assertEqual(qs['metadata_items'], 128)
self.assertEqual(qs['injected_files'], 5)
self.assertEqual(qs['injected_file_path_bytes'], 255)
self.assertEqual(qs['injected_file_content_bytes'], 10240)
self.assertEqual(qs['security_groups'], 10)
self.assertEqual(qs['security_group_rules'], 20)
self.assertEqual(qs['key_pairs'], 100)
def test_quotas_show_as_admin(self):
res_dict = self.controller.show(self.req_admin, 'test_class')
self.assertEqual(res_dict, quota_set('test_class'))
def test_quotas_show_as_unauthorized_user(self):
self.assertRaises(webob.exc.HTTPForbidden, self.controller.show,
self.req, 'test_class')
def test_quotas_update_as_admin(self):
body = {'quota_class_set': {'instances': 50, 'cores': 50,
'ram': 51200, 'floating_ips': 10,
'fixed_ips': -1, 'metadata_items': 128,
'injected_files': 5,
'injected_file_content_bytes': 10240,
'injected_file_path_bytes': 255,
'security_groups': 10,
'security_group_rules': 20,
'key_pairs': 100}}
res_dict = self.controller.update(self.req_admin, 'test_class', body)
self.assertEqual(res_dict, body)
def test_quotas_update_as_user(self):
body = {'quota_class_set': {'instances': 50, 'cores': 50,
'ram': 51200, 'floating_ips': 10,
'fixed_ips': -1, 'metadata_items': 128,
'injected_files': 5,
'injected_file_content_bytes': 10240,
'security_groups': 10,
'security_group_rules': 20,
'key_pairs': 100,
}}
self.assertRaises(webob.exc.HTTPForbidden, self.controller.update,
self.req, 'test_class', body)
def test_quotas_update_with_empty_body(self):
body = {}
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
self.req_admin, 'test_class', body)
def test_quotas_update_with_non_integer(self):
body = {'quota_class_set': {'instances': "abc"}}
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
self.req_admin, 'test_class', body)
body = {'quota_class_set': {'instances': 50.5}}
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
self.req_admin, 'test_class', body)
body = {'quota_class_set': {
'instances': u'\u30aa\u30fc\u30d7\u30f3'}}
self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
self.req_admin, 'test_class', body)
class QuotaClassSetsTestV2(QuotaClassSetsTestV21):
def _setup(self):
ext_mgr = extensions.ExtensionManager()
ext_mgr.extensions = {}
self.controller = quota_classes.QuotaClassSetsController(ext_mgr)
|
apache-2.0
| 2,523,223,996,149,545,500
| 41.297297
| 78
| 0.548722
| false
|
pika/pika
|
pika/frame.py
|
1
|
7744
|
"""Frame objects that do the frame demarshaling and marshaling."""
import logging
import struct
from pika import amqp_object
from pika import exceptions
from pika import spec
from pika.compat import byte
LOGGER = logging.getLogger(__name__)
class Frame(amqp_object.AMQPObject):
"""Base Frame object mapping. Defines a behavior for all child classes for
    assignment of core attributes and implementation of a core _marshal
method which child classes use to create the binary AMQP frame.
"""
NAME = 'Frame'
def __init__(self, frame_type, channel_number):
"""Create a new instance of a frame
:param int frame_type: The frame type
:param int channel_number: The channel number for the frame
"""
self.frame_type = frame_type
self.channel_number = channel_number
def _marshal(self, pieces):
"""Create the full AMQP wire protocol frame data representation
:rtype: bytes
"""
payload = b''.join(pieces)
return struct.pack('>BHI', self.frame_type, self.channel_number,
len(payload)) + payload + byte(spec.FRAME_END)
def marshal(self):
"""To be ended by child classes
:raises NotImplementedError
"""
raise NotImplementedError
class Method(Frame):
"""Base Method frame object mapping. AMQP method frames are mapped on top
of this class for creating or accessing their data and attributes.
"""
NAME = 'METHOD'
def __init__(self, channel_number, method):
"""Create a new instance of a frame
        :param int channel_number: The channel number for the frame
:param pika.Spec.Class.Method method: The AMQP Class.Method
"""
Frame.__init__(self, spec.FRAME_METHOD, channel_number)
self.method = method
def marshal(self):
"""Return the AMQP binary encoded value of the frame
        :rtype: bytes
"""
pieces = self.method.encode()
pieces.insert(0, struct.pack('>I', self.method.INDEX))
return self._marshal(pieces)
class Header(Frame):
"""Header frame object mapping. AMQP content header frames are mapped
on top of this class for creating or accessing their data and attributes.
"""
NAME = 'Header'
def __init__(self, channel_number, body_size, props):
"""Create a new instance of a AMQP ContentHeader object
:param int channel_number: The channel number for the frame
:param int body_size: The number of bytes for the body
:param pika.spec.BasicProperties props: Basic.Properties object
"""
Frame.__init__(self, spec.FRAME_HEADER, channel_number)
self.body_size = body_size
self.properties = props
def marshal(self):
"""Return the AMQP binary encoded value of the frame
        :rtype: bytes
"""
pieces = self.properties.encode()
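        # '>HxxQ' packs the class id (uint16), two zero pad bytes for the
        # always-zero AMQP "weight" field, then the 64-bit body size.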
pieces.insert(
0, struct.pack('>HxxQ', self.properties.INDEX, self.body_size))
return self._marshal(pieces)
class Body(Frame):
"""Body frame object mapping class. AMQP content body frames are mapped on
to this base class for getting/setting of attributes/data.
"""
NAME = 'Body'
def __init__(self, channel_number, fragment):
"""
Parameters:
- channel_number: int
- fragment: unicode or str
"""
Frame.__init__(self, spec.FRAME_BODY, channel_number)
self.fragment = fragment
def marshal(self):
"""Return the AMQP binary encoded value of the frame
        :rtype: bytes
"""
return self._marshal([self.fragment])
class Heartbeat(Frame):
"""Heartbeat frame object mapping class. AMQP Heartbeat frames are mapped
on to this class for a common access structure to the attributes/data
values.
"""
NAME = 'Heartbeat'
def __init__(self):
"""Create a new instance of the Heartbeat frame"""
Frame.__init__(self, spec.FRAME_HEARTBEAT, 0)
def marshal(self):
"""Return the AMQP binary encoded value of the frame
        :rtype: bytes
"""
return self._marshal(list())
class ProtocolHeader(amqp_object.AMQPObject):
"""AMQP Protocol header frame class which provides a pythonic interface
for creating AMQP Protocol headers
"""
NAME = 'ProtocolHeader'
def __init__(self, major=None, minor=None, revision=None):
"""Construct a Protocol Header frame object for the specified AMQP
version
:param int major: Major version number
:param int minor: Minor version number
:param int revision: Revision
"""
self.frame_type = -1
self.major = major or spec.PROTOCOL_VERSION[0]
self.minor = minor or spec.PROTOCOL_VERSION[1]
self.revision = revision or spec.PROTOCOL_VERSION[2]
def marshal(self):
"""Return the full AMQP wire protocol frame data representation of the
ProtocolHeader frame
        :rtype: bytes
"""
return b'AMQP' + struct.pack('BBBB', 0, self.major, self.minor,
self.revision)
def decode_frame(data_in): # pylint: disable=R0911,R0914
"""Receives raw socket data and attempts to turn it into a frame.
Returns bytes used to make the frame and the frame
:param str data_in: The raw data stream
:rtype: tuple(bytes consumed, frame)
:raises: pika.exceptions.InvalidFrameError
"""
# Look to see if it's a protocol header frame
try:
if data_in[0:4] == b'AMQP':
major, minor, revision = struct.unpack_from('BBB', data_in, 5)
return 8, ProtocolHeader(major, minor, revision)
except (IndexError, struct.error):
return 0, None
# Get the Frame Type, Channel Number and Frame Size
try:
(frame_type, channel_number, frame_size) = struct.unpack(
'>BHL', data_in[0:7])
except struct.error:
return 0, None
# Get the frame data
frame_end = spec.FRAME_HEADER_SIZE + frame_size + spec.FRAME_END_SIZE
# We don't have all of the frame yet
if frame_end > len(data_in):
return 0, None
# The Frame termination chr is wrong
if data_in[frame_end - 1:frame_end] != byte(spec.FRAME_END):
raise exceptions.InvalidFrameError("Invalid FRAME_END marker")
# Get the raw frame data
frame_data = data_in[spec.FRAME_HEADER_SIZE:frame_end - 1]
if frame_type == spec.FRAME_METHOD:
# Get the Method ID from the frame data
method_id = struct.unpack_from('>I', frame_data)[0]
# Get a Method object for this method_id
method = spec.methods[method_id]()
# Decode the content
method.decode(frame_data, 4)
# Return the amount of data consumed and the Method object
return frame_end, Method(channel_number, method)
elif frame_type == spec.FRAME_HEADER:
# Return the header class and body size
class_id, weight, body_size = struct.unpack_from('>HHQ', frame_data)
# Get the Properties type
properties = spec.props[class_id]()
# Decode the properties
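        # 12 = 2 (class id) + 2 (weight) + 8 (body size) header bytes precede
        # the property flags/values within frame_data.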
out = properties.decode(frame_data[12:])
# Return a Header frame
return frame_end, Header(channel_number, body_size, properties)
elif frame_type == spec.FRAME_BODY:
# Return the amount of data consumed and the Body frame w/ data
return frame_end, Body(channel_number, frame_data)
elif frame_type == spec.FRAME_HEARTBEAT:
# Return the amount of data and a Heartbeat frame
return frame_end, Heartbeat()
raise exceptions.InvalidFrameError("Unknown frame type: %i" % frame_type)
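# Hedged round-trip sketch (editor's addition, not part of the original
# module): a heartbeat frame is type 8 on channel 0 with an empty payload, so
# the whole wire frame is eight bytes ending in FRAME_END (0xCE).
#
#   raw = Heartbeat().marshal()      # b'\x08\x00\x00\x00\x00\x00\x00\xce'
#   consumed, frame = decode_frame(raw)
#   assert consumed == 8 and isinstance(frame, Heartbeat)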
|
bsd-3-clause
| -8,649,238,038,116,597,000
| 28.333333
| 78
| 0.631586
| false
|
RyanSkraba/beam
|
sdks/python/apache_beam/coders/typecoders.py
|
1
|
8078
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Type coders registration.
This module contains functionality to define and use coders for custom classes.
Let's say we have a class Xyz and we are processing a PCollection with elements
of type Xyz. If we do not register a coder for Xyz, a default pickle-based
fallback coder will be used. This can be undesirable for two reasons. First, we
may want a faster coder or a more space efficient one. Second, the pickle-based
coder is not deterministic in the sense that objects like dictionaries or sets
are not guaranteed to be encoded in the same way every time (elements are not
really ordered).
Two (sometimes three) steps are needed to define and use a custom coder:
- define the coder class
  - associate the coder with the class (a.k.a. coder registration)
- typehint DoFns or transforms with the new class or composite types using
the class.
A coder class is defined by subclassing from CoderBase and defining the
encode_to_bytes and decode_from_bytes methods. The framework uses duck-typing
for coders so it is not strictly required to subclass from CoderBase as long as
the encode/decode methods are defined.
Registering a coder class is made with a register_coder() call::
from apache_beam import coders
...
coders.registry.register_coder(Xyz, XyzCoder)
Additionally, DoFns and PTransforms may need type hints. This is not always
necessary since there is functionality to infer the return types of DoFns by
analyzing the code. For instance, for the function below the return type of
'Xyz' will be inferred::
def MakeXyzs(v):
return Xyz(v)
If Xyz is inferred then its coder will be used whenever the framework needs to
serialize data (e.g., writing to the shuffler subsystem responsible for group by
key operations). If a typehint is needed it can be specified by decorating the
DoFns or using with_input_types/with_output_types methods on PTransforms. For
example, the above function can be decorated::
@with_output_types(Xyz)
def MakeXyzs(v):
return complex_operation_returning_Xyz(v)
See apache_beam.typehints.decorators module for more details.
"""
from __future__ import absolute_import
from builtins import object
from typing import Any
from typing import Dict
from typing import Iterable
from typing import List
from typing import Type
from past.builtins import unicode
from apache_beam.coders import coders
from apache_beam.typehints import typehints
__all__ = ['registry']
class CoderRegistry(object):
"""A coder registry for typehint/coder associations."""
def __init__(self, fallback_coder=None):
self._coders = {} # type: Dict[Any, Type[coders.Coder]]
self.custom_types = [] # type: List[Any]
self.register_standard_coders(fallback_coder)
def register_standard_coders(self, fallback_coder):
"""Register coders for all basic and composite types."""
self._register_coder_internal(int, coders.VarIntCoder)
self._register_coder_internal(float, coders.FloatCoder)
self._register_coder_internal(bytes, coders.BytesCoder)
self._register_coder_internal(bool, coders.BooleanCoder)
self._register_coder_internal(unicode, coders.StrUtf8Coder)
self._register_coder_internal(typehints.TupleConstraint, coders.TupleCoder)
# Default fallback coders applied in that order until the first matching
# coder found.
default_fallback_coders = [coders.ProtoCoder, coders.FastPrimitivesCoder]
self._fallback_coder = fallback_coder or FirstOf(default_fallback_coders)
def _register_coder_internal(self, typehint_type, typehint_coder_class):
# type: (Any, Type[coders.Coder]) -> None
self._coders[typehint_type] = typehint_coder_class
def register_coder(self, typehint_type, typehint_coder_class):
# type: (Any, Type[coders.Coder]) -> None
if not isinstance(typehint_coder_class, type):
raise TypeError('Coder registration requires a coder class object. '
'Received %r instead.' % typehint_coder_class)
if typehint_type not in self.custom_types:
self.custom_types.append(typehint_type)
self._register_coder_internal(typehint_type, typehint_coder_class)
def get_coder(self, typehint):
# type: (Any) -> coders.Coder
coder = self._coders.get(
typehint.__class__ if isinstance(typehint, typehints.TypeConstraint)
else typehint, None)
if isinstance(typehint, typehints.TypeConstraint) and coder is not None:
return coder.from_type_hint(typehint, self)
if coder is None:
# We use the fallback coder when there is no coder registered for a
# typehint. For example a user defined class with no coder specified.
if not hasattr(self, '_fallback_coder'):
raise RuntimeError(
'Coder registry has no fallback coder. This can happen if the '
'fast_coders module could not be imported.')
if isinstance(typehint, (typehints.IterableTypeConstraint,
typehints.ListConstraint)):
return coders.IterableCoder.from_type_hint(typehint, self)
elif typehint is None:
# In some old code, None is used for Any.
# TODO(robertwb): Clean this up.
pass
elif typehint is object or typehint == typehints.Any:
# We explicitly want the fallback coder.
pass
elif isinstance(typehint, typehints.TypeVariable):
# TODO(robertwb): Clean this up when type inference is fully enabled.
pass
else:
# TODO(robertwb): Re-enable this warning when it's actionable.
# warnings.warn('Using fallback coder for typehint: %r.' % typehint)
pass
coder = self._fallback_coder
return coder.from_type_hint(typehint, self)
def get_custom_type_coder_tuples(self, types):
"""Returns type/coder tuples for all custom types passed in."""
return [(t, self._coders[t]) for t in types if t in self.custom_types]
def verify_deterministic(self, key_coder, op_name, silent=True):
if not key_coder.is_deterministic():
error_msg = ('The key coder "%s" for %s '
'is not deterministic. This may result in incorrect '
'pipeline output. This can be fixed by adding a type '
'hint to the operation preceding the GroupByKey step, '
'and for custom key classes, by writing a '
'deterministic custom Coder. Please see the '
'documentation for more details.' % (key_coder, op_name))
return key_coder.as_deterministic_coder(op_name, error_msg)
else:
return key_coder
class FirstOf(object):
"""For internal use only; no backwards-compatibility guarantees.
A class used to get the first matching coder from a list of coders."""
def __init__(self, coders):
# type: (Iterable[Type[coders.Coder]]) -> None
self._coders = coders
def from_type_hint(self, typehint, registry):
messages = []
for coder in self._coders:
try:
        return coder.from_type_hint(typehint, registry)
except Exception as e:
msg = ('%s could not provide a Coder for type %s: %s' %
(coder, typehint, e))
messages.append(msg)
raise ValueError('Cannot provide coder for %s: %s' %
(typehint, ';'.join(messages)))
registry = CoderRegistry()
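# Hedged usage sketch (editor's addition, not part of the original module):
# registering a coder for a hypothetical user-defined class and fetching it
# back. `Point` and `PointCoder` are illustrative names only.
#
#   class Point(object):
#     def __init__(self, x, y):
#       self.x, self.y = x, y
#
#   class PointCoder(coders.Coder):
#     def encode(self, value):
#       return ('%d,%d' % (value.x, value.y)).encode('utf-8')
#     def decode(self, encoded):
#       x, y = encoded.decode('utf-8').split(',')
#       return Point(int(x), int(y))
#     def is_deterministic(self):
#       return True
#
#   registry.register_coder(Point, PointCoder)
#   point_coder = registry.get_coder(Point)  # a PointCoder instance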
|
apache-2.0
| -3,578,803,315,472,674,000
| 41.072917
| 80
| 0.709953
| false
|
afmurillo/FlowFence
|
FlowMonitor_3.py
|
1
|
19291
|
""" Module that monitors the average network interface occupation """
import subprocess
from collections import deque
import threading
import application_switch_3
import SwitchProperties
import time
class FlowMonitor_3:
""" Class that monitors network interface occupation """
def __init__(self, samples=10, period=3, interval_time=1.0, upper_limit=10*0.8, lower_limit=10*0.6):
self.n_samples = samples
self.period = period
self.interval_time = interval_time
self.switch_properties = SwitchProperties.SwitchProperties()
self.interfaces_list = self.switch_properties.get_interfaces()
self.complete_interface_list = []
self.old_queue_list = []
self.queues_ids = []
self.qos_register = dict.fromkeys(['uuid','port', 'id', 'min-rate', 'max-rate'] )
self.lock = threading.Lock()
for i in range(len(self.interfaces_list)):
complete_interface_dict = dict.fromkeys(['name', 'dpid', 'capacity', 'lower_limit', 'upper_limit', 'threshold', 'samples', 'use_averages', 'monitoring', 'is_congested', 'queueList'])
complete_interface_dict['name'] = self.interfaces_list[i]['name']
complete_interface_dict['dpid'] = self.interfaces_list[i]['dpid']
complete_interface_dict['capacity'] = self.interfaces_list[i]['capacity']
complete_interface_dict['lower_limit'] = lower_limit
complete_interface_dict['upper_limit'] = upper_limit
complete_interface_dict['threshold'] = upper_limit
complete_interface_dict['samples'] = []
complete_interface_dict['prevEma'] = 0
complete_interface_dict['currentEma'] = 0
complete_interface_dict['use_averages'] = 0
complete_interface_dict['monitoring'] = 0
complete_interface_dict['is_congested'] = 0
complete_interface_dict['queueList'] = []
self.complete_interface_list.append(complete_interface_dict)
for i in range(len(self.complete_interface_list)):
self.complete_interface_list[i]['use_averages'] = deque( maxlen=self.n_samples )
#Control variables
self.threads_id = []
self.init_window()
def reset_queues(self):
""" Clears QoS queues in all interfaces """
for i in range(len(self.complete_interface_list)):
subprocess.check_output('ovs-ofctl del-flows ' + self.complete_interface_list[i]['name'], shell=True)
subprocess.check_output('./clear_queues.sh ', shell=True)
def init_window(self):
""" Inits samples window """
for j in range(len(self.complete_interface_list)):
for i in range(self.n_samples):
self.complete_interface_list[j]['use_averages'].append(0)
for i in range(self.n_samples):
#sample list of dicts, each dict has ['name']['sample']
result = self.get_sample()
for j in range(len(self.complete_interface_list)):
last_samples = result[j]['sample']
self.complete_interface_list[j]['use_averages'].popleft()
self.complete_interface_list[j]['use_averages'].append(last_samples)
if i == 0:
                    self.complete_interface_list[j]['prevEma'] = last_samples
for j in range(len(self.complete_interface_list)):
for a_bar in enumerate(self.complete_interface_list[j]['use_averages']):
self.complete_interface_list[j]['currentEma'] = self.ema(a_bar, self.complete_interface_list[j]['use_averages'], self.period, self.complete_interface_list[j]['prevEma'], smoothing=None)
self.complete_interface_list[j]['prevEma'] = self.complete_interface_list[j]['currentEma']
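    # Editor's note (assumption): `ema` is defined later in this module and is
    # taken to be a standard exponential moving average,
    # EMA_t = a*x_t + (1 - a)*EMA_{t-1}, with a = 2/(period + 1) unless the
    # `smoothing` argument overrides it.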
def update_window(self):
""" Updates the sample window """
for i in range(self.n_samples):
# Sample list of dicts, each dict has ['name']['sample']
result = self.get_sample() # < ---- GOTTA CHECK THIS
            last_samples = 0
for j in range(len(self.complete_interface_list)):
last_samples = result[j]['sample']
self.complete_interface_list[j]['use_averages'].popleft()
self.complete_interface_list[j]['use_averages'].append(last_samples)
for j in range(len(self.complete_interface_list)):
if i == 0:
                    self.complete_interface_list[j]['prevEma'] = last_samples
for a_bar in enumerate(self.complete_interface_list[j]['use_averages']):
self.complete_interface_list[j]['currentEma'] = self.ema(a_bar, self.complete_interface_list[j]['use_averages'], self.period, self.complete_interface_list[j]['prevEma'], smoothing=None)
self.complete_interface_list[j]['prevEma'] = self.complete_interface_list[j]['currentEma']
def start_monitoring(self):
""" Starts the thread that monitors interface occupation """
self.report_object = application_switch_3.ApplicationSwitch()
self.monitoring=1
self.threads_id.append(threading.Thread(name = 'Monitor', target=self.monitor))
self.threads_id[0].start()
def stop_monitoring(self):
""" Stops monitoring the output interface """
self.monitoring=0
#toDo: Handle
def congestion_stopped(self):
""" Unused """
self.is_congested=0
def monitor(self):
""" Obtains a new sample of the interface occupation average, and in case of congestion, notifies the main module """
self.startup_time = time.time()
        while True:
            if self.monitoring == 1:
                try:
                    self.update_window()
                    for j in range(len(self.complete_interface_list)):
                        #print "update, ema: " + str(self.complete_interface_list[j]['currentEma'])
                        #print "current threshold: " + str(self.complete_interface_list[j]['threshold'])
                        if (self.complete_interface_list[j]['is_congested'] == 0) and (self.complete_interface_list[j]['currentEma'] >= self.complete_interface_list[j]['threshold']):
                            # Congestion detected: switch to the lower (release) threshold
                            # and hand control to the application module, which is expected
                            # to set is_congested and resume monitoring
                            self.detection_time = time.time()
                            self.complete_interface_list[j]['threshold'] = self.complete_interface_list[j]['lower_limit']
                            self.monitoring = 0
                            self.report_object.congestion_detected(self.complete_interface_list[j])
                        elif (self.complete_interface_list[j]['is_congested'] == 1) and (self.complete_interface_list[j]['currentEma'] <= self.complete_interface_list[j]['threshold']):
                            # Congestion ceased: restore the upper (detection) threshold
                            self.complete_interface_list[j]['is_congested'] = 0
                            self.complete_interface_list[j]['threshold'] = self.complete_interface_list[j]['upper_limit']
                            self.report_object.congestion_ceased()
                except KeyboardInterrupt:
                    print " \n *** So long and thanks for all the fish! *** "
                    self.monitoring = 0
                    break
            else:
                # Avoid busy-waiting while monitoring is paused
                time.sleep(0.1)
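    # Threshold handling implements a simple hysteresis: while idle, the
    # detection threshold is upper_limit; once congestion is reported, the
    # threshold switches to lower_limit, and congestion is only declared
    # ceased when the EMA falls back below that lower value.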
    def clear_queues(self, controller_message):
        """ Removes every QoS record and queue and resets the local bookkeeping """
        subprocess.check_output('./clear_queues.sh ', shell=True)
        del self.old_queue_list[:]
        self.qos_register['uuid'] = None
        del self.queues_ids[:]
def update_queues(self, controller_message):
""" Updates the QoS queues, one queue is created for each flow """
# Here we should:
# 0. Check if register QoS exists, if not create it
# 1. Compare the received queue list with the previous one and:
# 1a. If there are new elements, create and attach a queue for them
# 1b. If there are deleted elements, delete the queue
# 1c. If there are elements with different bw, update it
# 2. Store the old queue list as the updated one
# 3. Notify the controller about the queue completion
# Queues are at (controller_message['bw_list'])
self.lock.acquire()
to_create = []
#to_update =[]
to_delete = []
# Check if qos exists
        if self.qos_register['uuid'] is None:
self.create_qos_register(self.complete_interface_list[0]['name'])
print "received bw list: ", controller_message['bw_list']
print "old bw list: ", self.old_queue_list
for j in range(len(controller_message['bw_list'])):
            # Flow present in both lists: update its queue bandwidth in place
for k in range(len(self.old_queue_list)):
if (controller_message['bw_list'][j]['nw_src'] == self.old_queue_list[k]['nw_src']) and (controller_message['bw_list'][j]['nw_dst'] == self.old_queue_list[k]['nw_dst']):
self.set_queue_bw(self.complete_interface_list[0]['name'], k, controller_message['bw_list'][j]['bw'])
break
            # A flow present in only one of the two lists means either
            # (a) an old flow ceased or (b) a new flow arrived
            if not any(src['nw_src'] == controller_message['bw_list'][j]['nw_src'] for src in self.old_queue_list):
                # New flow with no queue yet: schedule one for creation.
                # This also covers the case where old_queue_list is empty.
                print "New queue to create: ", controller_message['bw_list'][j]
                to_create.append(controller_message['bw_list'][j])
                continue
for j in range(len(self.old_queue_list)):
            if not any(src['nw_src'] == self.old_queue_list[j]['nw_src'] for src in controller_message['bw_list']):
                # Old flow no longer present in the new list: schedule its queue for deletion
                print "Old flows to delete: ", self.old_queue_list[j]
to_delete.append(j)
continue
self.create_individual_queues(self.complete_interface_list[0]['name'], to_create)
self.delete_individual_queue(self.complete_interface_list[0]['name'], to_delete)
        self.report_object.queues_ready(self.complete_interface_list[0], controller_message['bw_list'], self.old_queue_list)
self.lock.release()
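    # Hypothetical walk-through of update_queues (all values made up):
    #   old_queue_list     = [{'nw_src': '10.0.0.1', 'nw_dst': '10.0.0.2', 'bw': 5000000, ...}]
    #   received bw_list   = [{'nw_src': '10.0.0.1', 'nw_dst': '10.0.0.2', 'bw': 3000000},
    #                         {'nw_src': '10.0.0.3', 'nw_dst': '10.0.0.4', 'bw': 2000000}]
    #   -> the first flow matches, so its queue's max-rate is updated in place;
    #   -> the second flow is new, so a queue is created and attached to the QoS record.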
    def set_queue_bw(self, interface_name, queue_index, bw):
        """ Updates the max-rate of an existing queue in place """
        #ovs-vsctl set Queue e059add5-ea8d-4c05-a9be-895ab217d2b4 other-config:max-rate=99
        print "Giving bw of ", bw
command = 'ovs-vsctl set Queue ' + self.old_queue_list[queue_index]['uuid'] +' other-config:max-rate=' + str(bw)
subprocess.check_output(command, shell=True)
self.old_queue_list[queue_index]['bw'] = bw
    def delete_a_queue(self, a_queue):
        """ Detaches and destroys the queue matching a_queue's src/dst pair """
        delete_index = None
        for i in range(len(self.old_queue_list)):
            if (self.old_queue_list[i]['nw_src'] == a_queue['nw_src']) and (self.old_queue_list[i]['nw_dst'] == a_queue['nw_dst']):
                delete_index = i
                break
        if delete_index is None:
            # No matching queue: nothing to delete
            return
        command = 'ovs-vsctl remove QoS ' + self.qos_register['uuid'] + ' queues ' + str(self.old_queue_list[delete_index]['queueId'])
        subprocess.check_output(command, shell=True)
        command = 'ovs-vsctl destroy Queue ' + str(self.old_queue_list[delete_index]['uuid'])
        subprocess.check_output(command, shell=True)
        self.queues_ids.remove(self.old_queue_list[delete_index]['queueId'])
        del self.old_queue_list[delete_index]
    def delete_individual_queue(self, interface_name, to_delete):
        """ Detaches and destroys the queues at the given indexes of old_queue_list """
        for i in range(len(to_delete)):
            # grep exits non-zero when the uuid is not found, which makes
            # check_output raise; treat that as "record already gone"
            command = 'ovs-vsctl list Queue | grep ' + str(self.old_queue_list[to_delete[i]]['uuid'])
            try:
                result = subprocess.check_output(command, shell=True).split('\n')[0]
            except subprocess.CalledProcessError:
                continue
            print "Grep command result: ", result
            if not result:
                continue
            command = 'ovs-vsctl remove QoS ' + self.qos_register['uuid'] + ' queues ' + str(self.old_queue_list[to_delete[i]]['queueId'])
            subprocess.check_output(command, shell=True)
            command = 'ovs-vsctl destroy Queue ' + str(self.old_queue_list[to_delete[i]]['uuid'])
            subprocess.check_output(command, shell=True)
            self.queues_ids.remove(self.old_queue_list[to_delete[i]]['queueId'])
        # Rebuild old_queue_list without the deleted entries
        removeset = set(to_delete)
        self.old_queue_list[:] = [v for k, v in enumerate(self.old_queue_list) if k not in removeset]
    def create_individual_queues(self, interface_name, to_create):
        """ Creates one queue per new flow and attaches it to the QoS record """
        for i in range(len(to_create)):
            a_queue_dict = dict.fromkeys(['uuid', 'queueId', 'nw_src', 'nw_dst', 'bw'])
            # Pick the smallest queue id not already in use
            a = 0
            while a in self.queues_ids:
                a += 1
            self.queues_ids.append(a)
            command = 'ovs-vsctl create Queue other-config:max-rate=' + str(to_create[i]['bw'])
            an_uuid = subprocess.check_output(command, shell=True).split('\n')[0]
            command = 'ovs-vsctl add QoS ' + self.qos_register['uuid'] + ' queues ' + str(a) + '=' + an_uuid
            subprocess.check_output(command, shell=True)
            a_queue_dict['uuid'] = an_uuid
            a_queue_dict['queueId'] = a
            a_queue_dict['nw_src'] = to_create[i]['nw_src']
            a_queue_dict['nw_dst'] = to_create[i]['nw_dst']
            a_queue_dict['bw'] = to_create[i]['bw']
            self.old_queue_list.append(a_queue_dict)
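    # Per new flow this issues (uuid and rate illustrative):
    #   ovs-vsctl create Queue other-config:max-rate=<bw>
    #   ovs-vsctl add QoS <qos-uuid> queues <id>=<queue-uuid>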
    def create_qos_register(self, interface_name):
        """ Creates the QoS record and attaches it to the output port """
        #ovs-vsctl -- set Port eth0br qos=@fenceqos -- --id=@fenceqos create QoS type=linux-htb
        #self.qos_register = dict.fromkeys(['uuid', 'port', 'id', 'min-rate', 'max-rate'])
        command = 'ovs-vsctl -- set Port ' + interface_name + ' qos=@fenceqos -- --id=@fenceqos create QoS type=linux-htb'
        self.qos_register['uuid'] = subprocess.check_output(command, shell=True).split('\n')[0]
        self.qos_register['port'] = interface_name
        self.qos_register['id'] = 'fenceqos'
        self.qos_register['max-rate'] = '900000000'
        #ovs-vsctl set QoS 016d2315-6305-4692-ae89-c2a3e680e874 other-config:max-rate=1000000
        print "QoS uuid: ", self.qos_register['uuid']
        command = 'ovs-vsctl set QoS ' + self.qos_register['uuid'] + ' other-config:max-rate=900000000'
        subprocess.check_output(command, shell=True)
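    # With interface_name='s1-eth1' (name illustrative) this runs, in order:
    #   ovs-vsctl -- set Port s1-eth1 qos=@fenceqos -- --id=@fenceqos create QoS type=linux-htb
    #   ovs-vsctl set QoS <returned-uuid> other-config:max-rate=900000000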
def create_queues(self, controller_message):
""" Creates the QoS queues, one queue is created for each flow """
self.clear_queues(controller_message)
self.queues_creation_time = time.time()
        self.complete_interface_list[0]['queueList'] = self.init_queues(self.complete_interface_list[0]['name'], controller_message['bw_list'])
        self.set_queues_bw(self.complete_interface_list[0]['queueList'])
        self.report_object.queues_ready(self.complete_interface_list[0], controller_message['bw_list'], self.complete_interface_list[0]['queueList'])
self.queues_complete_time = time.time()
#print "Startup time: " + str(self.startup_time)
#print "Detection time: " + str(self.detection_time)
#print "Queues creation time: " + str(self.queues_creation_time)
#print "Queues complete time: " + str(self.queues_complete_time)
    @classmethod
    def init_queues(cls, interface_name, bw_list):
        """ Inits the QoS queues: one ovs-vsctl transaction creates the QoS record and one queue per flow """
        queues_list = []
        qos_string = 'ovs-vsctl -- set Port ' + interface_name + ' qos=@fenceqos -- --id=@fenceqos create QoS type=linux-htb other-config:max-rate=900000000'
        queues_string = ''
        for j in range(len(bw_list)):
            a_queue_dict = dict.fromkeys(['queueId', 'queueuuid', 'nw_src', 'nw_dst', 'bw'])
            a_queue_dict['queueId'] = j
            a_queue_dict['nw_src'] = bw_list[j]['nw_src']
            a_queue_dict['nw_dst'] = bw_list[j]['nw_dst']
            a_queue_dict['bw'] = bw_list[j]['bw']
            a_queue = str(a_queue_dict['queueId']) + '=@queue' + str(a_queue_dict['queueId'])
            if j < len(bw_list) - 1:
                a_queue = a_queue + ','
            queues_string = queues_string + a_queue
            queues_list.append(a_queue_dict)
        queues_string = 'queues=' + queues_string
        queues_creation = ''
        for j in range(len(bw_list)):
            a_creation = '-- --id=@queue' + str(queues_list[j]['queueId']) + ' create Queue other-config:max-rate=100000000 '
            queues_creation = queues_creation + a_creation
        command = qos_string + ' ' + queues_string + ' ' + queues_creation
        #print "Queue command: \n " + str(command)
        subprocess.check_output(command, shell=True)
        # Getting the uuid of each queue from the QoS record's queues column
        queues_string = subprocess.check_output("ovs-vsctl list Queue", shell=True)
        #print "Queues Ready: " + str(queues_string)
        allqueues_string = subprocess.check_output("ovs-vsctl list QoS | grep queues", shell=True)
        for j in range(len(queues_list)):
            # Parses a line like: queues              : {0=<uuid>, 1=<uuid>}
            queues_list[j]['queueuuid'] = allqueues_string.split(":")[1].split(",")[j].split("=")[1].split('}\n')[0].strip()
        return queues_list
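    # For two flows, the composed transaction would look roughly like
    # (interface name and rates illustrative):
    #   ovs-vsctl -- set Port eth1 qos=@fenceqos \
    #     -- --id=@fenceqos create QoS type=linux-htb other-config:max-rate=900000000 \
    #        queues=0=@queue0,1=@queue1 \
    #     -- --id=@queue0 create Queue other-config:max-rate=100000000 \
    #     -- --id=@queue1 create Queue other-config:max-rate=100000000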
@classmethod
def set_queues_bw(cls, queues_list):
""" Sets the queue bw, according to the policy defined by the SDN controller """
for i in range(len(queues_list)):
subprocess.check_output("ovs-vsctl set queue " + queues_list[i]['queueuuid'] + " other-config:max-rate="+str(queues_list[i]['bw']), shell=True)
    def ema(self, a_bar, series, period, prevma, smoothing=None):
        '''Returns the Exponential Moving Average of a series.
        Keyword arguments:
        a_bar     -- (index, value) pair for the current position in the series,
                     as produced by enumerate()
        series    -- series of values to be averaged
        period    -- number of values in the series to average
        prevma    -- previous exponential moving average
        smoothing -- smoothing factor to use in the series.
            valid values: between 0 & 1.
            default: None, which falls back to a fixed factor of 0.8
            closer to 1 gives greater weight to recent values - less smooth
            closer to 0 gives greater weight to older values - more smooth
        '''
        if smoothing is None:
            # Fixed factor preserved from the original behaviour; the classic
            # alternative would be 2.0 / (period + 1.0)
            smoothing = 0.8
        if a_bar[0] <= 0:
            return series[0]
        elif a_bar[0] < period:
            return self.cumulative_sma(a_bar[0], series, prevma)
        return prevma + smoothing * (series[a_bar[0]] - prevma)
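    # Hypothetical trace with period=3 and smoothing=0.8 over samples [10, 20, 40, 80]:
    #   bar 0 -> 10 (first value), bar 1 -> cumulative SMA = 15,
    #   bar 2 -> cumulative SMA = 23.33, bar 3 -> 23.33 + 0.8 * (80 - 23.33) = 68.67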
    @classmethod
    def cumulative_sma(cls, a_bar, series, prevma):
        """
        Returns the cumulative or unweighted simple moving average.
        Avoids averaging the entire series on each call.
        Keyword arguments:
        a_bar  -- current index of the value in the series (a plain int)
        series -- list or tuple of data to average
        prevma -- previous average (n - 1) of the series.
        """
        if a_bar <= 0:
            return series[0]
        return prevma + ((series[a_bar] - prevma) / (a_bar + 1.0))
    def get_sample(self, interval_time=1.0):
        """ Obtains a sample of the interface occupation in bytes/s """
        samples_list = []
        for j in range(len(self.complete_interface_list)):
            sample_dict = dict.fromkeys(['name', 'sample'])
            samples_list.append(sample_dict)
        # Two readings per interface, interval_time seconds apart; the
        # difference is the byte count transmitted in that interval
        sample_1 = []
        sample_2 = []
        for j in range(len(self.complete_interface_list)):
            # Column $10 of /proc/net/dev is the interface's TX bytes counter
            sample_1.append(float(subprocess.check_output("cat /proc/net/dev | grep " + self.complete_interface_list[j]['name'] + " | awk '{print $10;}'", shell=True).split('\n')[0]))
        time.sleep(interval_time)
        for j in range(len(self.complete_interface_list)):
            sample_2.append(float(subprocess.check_output("cat /proc/net/dev | grep " + self.complete_interface_list[j]['name'] + " | awk '{print $10;}'", shell=True).split('\n')[0]))
            samples_list[j]['name'] = self.complete_interface_list[j]['name']
            #samples_list[j]['sample'] = ((sample_2[j] - sample_1[j]) / 1048576)  # in MBytes
            samples_list[j]['sample'] = sample_2[j] - sample_1[j]
        return samples_list
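    # A returned sample list might look like (values illustrative):
    #   [{'name': 's1-eth1', 'sample': 1250000.0}]    # ~1.25 MB transmitted in 1 s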
if __name__ == "__main__":
SOME_SAMPLES = 10
PERIOD = 3 #number of bars to average
AN_INTERVAL_TIME = 1.0
#toDo: Handle this as a percentage of total link capacity
AN_UPPER_LIMIT = 0.4
LOWER_LIMIT = 0.41
USE_AVERAGES = deque( maxlen=SOME_SAMPLES )
CODE = FlowMonitor_3(SOME_SAMPLES, AN_INTERVAL_TIME, AN_UPPER_LIMIT)
CODE.start_monitoring()
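# Intended to be run as root, since ovs-vsctl/ovs-ofctl and the queue scripts
# require it; e.g. "sudo python <this_file>.py" (filename hypothetical).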