Dataset Viewer
| project (string) | commit_sha (string) | parent_sha (string) | file_path (string) | project_url (string) | likely_bug (bool) | comodified (bool) | in_function (bool) | diff (string) | before (string) | after (string) | sstub_pattern (string) | edit_script (string) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
oauth2client
|
02ed225de28e3f8275c35be06b7b3266d1dcbc2a
|
c0c6ab03d536e7fbec798f5f117a8325df691913
|
oauth2client/contrib/django_util/__init__.py
|
https://github.com/waprin/oauth2client
| true
| false
| false
|
@@ -100,7 +100,7 @@ request.oauth
@oauth_required
def requires_default_scopes(request):
- email = request.credentials.id_token['email']
+ email = request.oauth.credentials.id_token['email']
service = build(serviceName='calendar', version='v3',
http=request.oauth.http,
developerKey=API_KEY)
|
email = request . credentials . id_token [ 'email' ]
|
email = request . oauth . credentials . id_token [ 'email' ]
|
ADD_ATTRIBUTE_ACCESS
|
[["Insert", ["attribute", 3, 15, 3, 34], ["attribute", "N0"], 0], ["Insert", ["attribute", 3, 15, 3, 34], [".:.", "T"], 1], ["Move", "N0", ["identifier:request", 3, 15, 3, 22], 0], ["Move", "N0", [".:.", 3, 22, 3, 23], 1], ["Insert", "N0", ["identifier:oauth", "T"], 2]]
|
installsystems
|
dad43b375807faa36d19b7c3e513e678be26c81c
|
30ee020125972f0b36024251af8b6ae8b0688c8d
|
installsystems/tools.py
|
https://github.com/seblu/installsystems
| true
| false
| true
|
@@ -117,7 +117,7 @@ def abspath(path):
return path
elif ptype == "file":
if path.startswith("file://"):
- path = path[len("file://")]
+ path = path[len("file://"):]
return os.path.abspath(path)
else:
return None
|
path = path [ len ( "file://" ) ]
|
path = path [ len ( "file://" ) : ]
|
SINGLE_STMT
|
[["Insert", ["subscript", 3, 20, 3, 40], ["slice", "N0"], 2], ["Move", "N0", ["call", 3, 25, 3, 39], 0], ["Insert", "N0", [":::", "T"], 1]]
|
Theano
|
57ab4e949d0eb4df61892bde53d7e55452c01f94
|
9c9f2166dd4ce760c1ea7dd0dc08d66a65b49db7
|
theano/sandbox/linalg/ops.py
|
https://github.com/albertz/Theano
| true
| false
| true
|
@@ -1020,7 +1020,7 @@ class EighGrad(Op):
self.tri1 = lambda a: numpy.tril(a, -1)
def props(self):
- return ()
+ return (self.UPLO,)
def __hash__(self):
return hash((type(self), self.props()))
|
return ( )
|
return ( self . UPLO , )
|
ADD_ELEMENTS_TO_ITERABLE
|
[["Insert", ["tuple", 3, 16, 3, 18], ["attribute", "N0"], 1], ["Insert", ["tuple", 3, 16, 3, 18], [",:,", "T"], 2], ["Insert", "N0", ["identifier:self", "T"], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:UPLO", "T"], 2]]
|
ncstyler
|
e85a12b9ef768fe82ab9475930b3f9c09d2dae8b
|
3ab79070972e7087815f7fbbc5f300eafe8859f6
|
src/ncstyler/console.py
|
https://github.com/starofrainnight/ncstyler
| true
| false
| true
|
@@ -58,7 +58,7 @@ class Application(object):
self.__config["_base_"].update(old_base)
def parse_define(self, adefine):
- matched = re.match(r"[^\w]*(\w+)(?:\((.*)\)|\s).*", adefine)
+ matched = re.match(r"[^\w]*(\w+)(?:\((.*)\)|\s*).*", adefine)
name = matched.group(1)
parameters = []
if matched.group(2) is not None:
|
matched = re . match ( r"[^\w]*(\w+)(?:\((.*)\)|\s).*" , adefine )
|
matched = re . match ( r"[^\w]*(\w+)(?:\((.*)\)|\s*).*" , adefine )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:r\"[^\\w]*(\\w+)(?:\\((.*)\\)|\\s).*\"", 3, 28, 3, 59], "r\"[^\\w]*(\\w+)(?:\\((.*)\\)|\\s*).*\""]]
|
doorman
|
73fb8a1423b6905574b3204dcf6f0e6714c5dfc4
|
31f57a59b5e293d304aa75f06a8d475eafcb367f
|
tests/test_functional.py
|
https://github.com/KevinHock/doorman
| true
| false
| true
|
@@ -1066,7 +1066,7 @@ class TestCreateQueryPackFromUpload:
def test_pack_upload_invalid_json(self, testapp, db):
resp = testapp.post(url_for('manage.add_pack'), upload_files=[
- ('pack', 'foo.conf', 'bad data'),
+ ('pack', 'foo.conf', 'bad data'.encode('utf-8')),
])
# This won't be a redirect, since it's an error.
|
resp = testapp . post ( url_for ( 'manage.add_pack' ) , upload_files = [ ( 'pack' , 'foo.conf' , 'bad data' ) , ] )
|
resp = testapp . post ( url_for ( 'manage.add_pack' ) , upload_files = [ ( 'pack' , 'foo.conf' , 'bad data' . encode ( 'utf-8' ) ) , ] )
|
ADD_METHOD_CALL
|
[["Insert", ["tuple", 3, 13, 3, 45], ["call", "N0"], 5], ["Insert", ["tuple", 3, 13, 3, 45], ["):)", "T"], 6], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Move", "N1", ["string:'bad data'", 3, 34, 3, 44], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:encode", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["string:'utf-8'", "T"], 1], ["Move", "N2", ["):)", 3, 44, 3, 45], 2]]
|
PyMISP
|
e0b781c03344f08f1de9ef007914d505d5345bdf
|
2c8c4abe09571bcbb20d1914cd36da88932cb97e
|
pymisp/api.py
|
https://github.com/punkrokk/PyMISP
| true
| false
| true
|
@@ -66,7 +66,7 @@ class PyMISP(object):
Warning, there's a limit on the number of results
"""
session = self.__prepare_session()
- return session.get(self.rest)
+ return session.get(self.url)
def get_event(self, event_id):
"""
|
"""
session = self.__prepare_session()
return session.get(self.rest)
def get_event(self, event_id):
"""
|
"""
session = self.__prepare_session()
return session.get(self.url)
def get_event(self, event_id):
"""
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:\"\"\"\n session = self.__prepare_session()\n return session.get(self.rest)\n \n def get_event(self, event_id):\n \"\"\"", 1, 9, 6, 12], "\"\"\"\n session = self.__prepare_session()\n return session.get(self.url)\n \n def get_event(self, event_id):\n \"\"\""]]
|
infi.recipe.application_packager
|
86ac3d87aec9f348ba3e6df1e5d5da8fefaa2516
|
d59a5f1dc189e945d6849d76f128db89e7e063c2
|
src/infi/recipe/application_packager/msi/wix/__init__.py
|
https://github.com/Infinidat/infi.recipe.application_packager
| true
| false
| true
|
@@ -284,7 +284,7 @@ class Wix(object):
@property
def install_execute_sequence(self):
- return self.product[6]
+ return self.product[5]
def build(self, wix_basedir, input_file, output_file):
from ...utils.execute import execute_assert_success
|
return self . product [ 6 ]
|
return self . product [ 5 ]
|
CHANGE_NUMERIC_LITERAL
|
[["Update", ["integer:6", 3, 29, 3, 30], "5"]]
|
pyparallel
|
15d2a5cc9b85f5d653a063bb529594eaa4539668
|
6d0477a80e08dd346995e5d29588a8e9d2775f64
|
Lib/tempfile.py
|
https://github.com/CompanyOnTheWorld/pyparallel
| true
| false
| true
|
@@ -250,7 +250,7 @@ def gettempdir():
_once('tempdir', _get_default_tempdir)
return tempdir
-def mkstemp(suffix="", prefix=template, dir=gettempdir(), binary=1):
+def mkstemp(suffix="", prefix=template, dir=gettempdir(), binary=True):
|
def mkstemp ( suffix = "" , prefix = template , dir = gettempdir ( ) , binary = 1 ) :
|
def mkstemp ( suffix = "" , prefix = template , dir = gettempdir ( ) , binary = True ) :
|
SINGLE_TOKEN
|
[["Insert", ["default_parameter", 3, 59, 3, 67], ["true:True", "T"], 2], ["Delete", ["integer:1", 3, 66, 3, 67]]]
|
django-rest-framework
|
4b30b320144897965bb12fa07745ba02a0452884
|
c0cf37e35dd51a1c5fb6e98d0bc3dcff0f60d412
|
rest_framework/schemas/generators.py
|
https://github.com/zarinpy/django-rest-framework
| true
| false
| true
|
@@ -151,7 +151,7 @@ class BaseSchemaGenerator(object):
# Set by 'SCHEMA_COERCE_PATH_PK'.
coerce_path_pk = None
- def __init__(self, title=None, url=None, description=None, patterns=None, urlconf=None, version='0.1.0'):
+ def __init__(self, title=None, url=None, description=None, patterns=None, urlconf=None, version=''):
if url and not url.endswith('/'):
url += '/'
|
def __init__ ( self , title = None , url = None , description = None , patterns = None , urlconf = None , version = '0.1.0' ) : if url and not url . endswith ( '/' ) : url += '/'
|
def __init__ ( self , title = None , url = None , description = None , patterns = None , urlconf = None , version = '' ) : if url and not url . endswith ( '/' ) : url += '/'
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'0.1.0'", 3, 101, 3, 108], "''"]]
|
cinder
|
eb0f2e4dd538a79184efbb23d7e404147dfe877b
|
14660991cb849c240acf41b377a7389b751cb6c7
|
cinder/tests/test_utils.py
|
https://github.com/NeCTAR-RC/cinder
| true
| false
| true
|
@@ -462,7 +462,7 @@ class GenericUtilsTestCase(test.TestCase):
self.assertIsNone(utils.check_ssh_injection(cmd_list))
def test_check_ssh_injection_on_error(self):
- with_unquoted_space = ['shh', 'my_name@ name_of_remote_computer']
+ with_unquoted_space = ['ssh', 'my_name@ name_of_remote_computer']
self.assertRaises(exception.SSHInjectionThreat,
utils.check_ssh_injection,
with_unquoted_space)
|
with_unquoted_space = [ 'shh' , 'my_name@ name_of_remote_computer' ]
|
with_unquoted_space = [ 'ssh' , 'my_name@ name_of_remote_computer' ]
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'shh'", 3, 32, 3, 37], "'ssh'"]]
|
EGAMI
|
b9b22ba63a0a331a847994c68998f99528e4e37a
|
5bff4bdd4a2877410b8e6daa070641fed1ab8ce1
|
lib/python/Screens/InfoBarGenerics.py
|
https://github.com/ostende/EGAMI
| true
| false
| true
|
@@ -1587,7 +1587,7 @@ class InfoBarAdditionalInfo:
def gotServiceEvent(self, ev):
service = self.session.nav.getCurrentService()
- if ev == iPlayableService.evStart:
+ if ev == iPlayableService.evNewProgramInfo:
self.checkTunerState(service)
class InfoBarNotifications:
|
if ev == iPlayableService . evStart : self . checkTunerState ( service )
|
if ev == iPlayableService . evNewProgramInfo : self . checkTunerState ( service )
|
CHANGE_ATTRIBUTE_USED
|
[["Update", ["identifier:evStart", 3, 29, 3, 36], "evNewProgramInfo"]]
|
ipython
|
b9ee1914b49c74e4fcd4cedad7ce1fb57f43ba39
|
c0bdb216972e5c4ef593a64d09e109876e795870
|
IPython/frontend/qt/console/frontend_widget.py
|
https://github.com/sratcliffe/ipython
| true
| false
| true
|
@@ -144,7 +144,7 @@ class FrontendWidget(HistoryConsoleWidget, BaseFrontendMixin):
document.contentsChange.connect(self._document_contents_change)
# set flag for whether we are connected via localhost
- self._local_kernel = kw.get('local_kernel', False)
+ self._local_kernel = kw.get('local_kernel', FrontendWidget._local_kernel)
#---------------------------------------------------------------------------
# 'ConsoleWidget' public interface
|
self . _local_kernel = kw . get ( 'local_kernel' , False )
|
self . _local_kernel = kw . get ( 'local_kernel' , FrontendWidget . _local_kernel )
|
SINGLE_STMT
|
[["Insert", ["argument_list", 3, 36, 3, 59], ["attribute", "N0"], 3], ["Insert", "N0", ["identifier:FrontendWidget", "T"], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:_local_kernel", "T"], 2], ["Delete", ["false:False", 3, 53, 3, 58]]]
|
DownloaderForReddit
|
0065920f18b885079e66a63fdf64051467eb5433
|
427cc14bf357c3a05a520f07b21273b1d92d59f0
|
DownloaderForReddit/Persistence/SettingsManager.py
|
https://github.com/MalloyDelacroix/DownloaderForReddit
| true
| false
| true
|
@@ -128,7 +128,7 @@ class SettingsManager:
'download_images': False,
'download_comments': False,
'download_comment_content': False,
- 'nsfw_filter': False,
+ 'download_nsfw': False,
'date_added': False
}
self.main_window_tooltip_display_dict = self.get('tooltip_display', 'main_window_tooltip_display_dict',
|
'nsfw_filter' : False ,
|
'download_nsfw' : False ,
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'nsfw_filter'", 3, 13, 3, 26], "'download_nsfw'"]]
|
django-simple-seo
|
9f971c5f098b924448be2d9270a3bd09ff8c9ee4
|
74a5fb8d91e59008152166b736daab0a327caec2
|
simple_seo/utils.py
|
https://github.com/danigosa/django-simple-seo
| true
| false
| true
|
@@ -34,7 +34,7 @@ def _load_patterns(views, patterns, namespace=None):
if isinstance(pattern, RegexURLPattern):
_load_pattern(views, pattern, namespace)
elif isinstance(pattern, RegexURLResolver):
- if namespace and hasattr(pattern, 'namespace'):
+ if namespace and hasattr(pattern, 'namespace') and getattr(pattern, 'namespace'):
namespace += ':' + pattern.namespace
elif hasattr(pattern, 'namespace'):
namespace = pattern.namespace
|
if namespace and hasattr ( pattern , 'namespace' ) : namespace += ':' + pattern . namespace elif hasattr ( pattern , 'namespace' ) : namespace = pattern . namespace
|
if namespace and hasattr ( pattern , 'namespace' ) and getattr ( pattern , 'namespace' ) : namespace += ':' + pattern . namespace elif hasattr ( pattern , 'namespace' ) : namespace = pattern . namespace
|
MORE_SPECIFIC_IF
|
[["Move", ["boolean_operator", 3, 16, 3, 59], ["boolean_operator", 3, 16, 3, 59], 0], ["Insert", ["boolean_operator", 3, 16, 3, 59], ["and:and", "T"], 1], ["Insert", ["boolean_operator", 3, 16, 3, 59], ["call", "N0"], 2], ["Insert", "N0", ["identifier:getattr", "T"], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Insert", "N1", ["(:(", "T"], 0], ["Insert", "N1", ["identifier:pattern", "T"], 1], ["Insert", "N1", [",:,", "T"], 2], ["Insert", "N1", ["string:'namespace'", "T"], 3], ["Insert", "N1", ["):)", "T"], 4]]
|
PyFITS
|
a87cde56eea0b75c92b872bc3a0ccd3fa644120a
|
177630738eff836f9f2e67f5d3f9a3bbc2225388
|
lib/NP_pyfits.py
|
https://github.com/spacetelescope/PyFITS
| true
| false
| true
|
@@ -3112,7 +3112,7 @@ def new_table (input, header=None, nrows=0, fill=0, tbtype='BinTableHDU'):
for i in range(len(tmp)):
_arr = tmp._arrays[i]
if isinstance(_arr, Delayed):
- tmp._arrays[i] = rec.recarray.field(_arr.hdu.data,i)
+ tmp._arrays[i] = rec.recarray.field(_arr.hdu.data,_arr.field)
# use the largest column shape as the shape of the record
if nrows == 0:
|
tmp . _arrays [ i ] = rec . recarray . field ( _arr . hdu . data , i )
|
tmp . _arrays [ i ] = rec . recarray . field ( _arr . hdu . data , _arr . field )
|
SINGLE_STMT
|
[["Insert", ["argument_list", 3, 48, 3, 65], ["attribute", "N0"], 3], ["Update", ["identifier:i", 3, 63, 3, 64], "_arr"], ["Move", "N0", ["identifier:i", 3, 63, 3, 64], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:field", "T"], 2]]
|
django
|
bc7a219b1e7979ebf0b511d8a3775a0af6dbad33
|
293f7a21147ad94c92c7d5b3f33cbab2f87b001b
|
tests/regressiontests/test_client_regress/tests.py
|
https://github.com/rsalmaso/django
| true
| false
| true
|
@@ -810,7 +810,7 @@ class RequestMethodStringDataTests(TestCase):
def test_patch(self):
"Request a view with string data via request method PATCH"
# Regression test for #17797
- data = u'{"test": "json"}'
+ data = '{"test": "json"}'
response = self.client.patch('/test_client_regress/request_methods/', data=data, content_type='application/json')
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, b'request method: PATCH')
|
data = u'{"test": "json"}'
|
data = '{"test": "json"}'
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:u'{\"test\": \"json\"}'", 3, 16, 3, 35], "'{\"test\": \"json\"}'"]]
|
moto
|
e951a7f2d7d45fa63031e8c914f50fbf73f9991d
|
a5ecf26f5a31dc934db1378e8ad1b48258af7487
|
moto/s3/responses.py
|
https://github.com/StartTheShift/moto
| true
| false
| true
|
@@ -130,7 +130,7 @@ def _key_response(request, full_url, headers):
if method == 'PUT':
if 'x-amz-copy-source' in request.headers:
# Copy key
- src_bucket, src_key = request.headers.get("x-amz-copy-source").split("/",2)
+ src_bucket, src_key = request.headers.get("x-amz-copy-source").split("/",1)
s3_backend.copy_key(src_bucket, src_key, bucket_name, key_name)
template = Template(S3_OBJECT_COPY_RESPONSE)
return template.render(key=src_key)
|
src_bucket , src_key = request . headers . get ( "x-amz-copy-source" ) . split ( "/" , 2 )
|
src_bucket , src_key = request . headers . get ( "x-amz-copy-source" ) . split ( "/" , 1 )
|
CHANGE_NUMERIC_LITERAL
|
[["Update", ["integer:2", 3, 86, 3, 87], "1"]]
|
isida3
|
d5986e0e6f0a0f16aaa2550f4c6846c1c5d22f38
|
b789fe5da1e2d70c0b4b3b1d38700f44f10d005d
|
plugins/chat.py
|
https://github.com/disabler/isida3
| true
| false
| false
|
@@ -27,7 +27,7 @@ FLOOD_STATS = {}
autophrases_time = {}
-if os.path.isfile(os.path.join(loc_folder, '%s.txt' % CURRENT_LOCALE)):
+if os.path.isfile(loc_folder % CURRENT_LOCALE):
chat_folder = data_folder % 'chat/%s/' % CURRENT_LOCALE
else:
chat_folder = data_folder % 'chat/en/'
|
if os . path . isfile ( os . path . join ( loc_folder , '%s.txt' % CURRENT_LOCALE ) ) : chat_folder = data_folder % 'chat/%s/' % CURRENT_LOCALE else : chat_folder = data_folder % 'chat/en/'
|
if os . path . isfile ( loc_folder % CURRENT_LOCALE ) : chat_folder = data_folder % 'chat/%s/' % CURRENT_LOCALE else : chat_folder = data_folder % 'chat/en/'
|
SINGLE_STMT
|
[["Move", ["binary_operator", 3, 44, 3, 69], ["identifier:loc_folder", 3, 32, 3, 42], 0], ["Delete", ["identifier:os", 3, 19, 3, 21]], ["Delete", [".:.", 3, 21, 3, 22]], ["Delete", ["identifier:path", 3, 22, 3, 26]], ["Delete", ["attribute", 3, 19, 3, 26]], ["Delete", [".:.", 3, 26, 3, 27]], ["Delete", ["identifier:join", 3, 27, 3, 31]], ["Delete", ["attribute", 3, 19, 3, 31]], ["Delete", ["(:(", 3, 31, 3, 32]], ["Delete", [",:,", 3, 42, 3, 43]], ["Delete", ["string:'%s.txt'", 3, 44, 3, 52]], ["Delete", ["call", 3, 19, 3, 70]], ["Delete", ["):)", 3, 70, 3, 71]]]
|
Prass
|
75b4fe0bf9ab9dd450f1bd5f494dd506e4a904d9
|
595b61a6f240dfeb63ca15d3034f03fe05025852
|
subs.py
|
https://github.com/Zeght/Prass
| true
| false
| true
|
@@ -358,7 +358,7 @@ class AssScript(object):
if forced_resolution:
dst_width, dst_height = forced_resolution
else:
- dst_width, dst_height = other_script._sections_dict[SCRIPT_INFO_SECTION].get_resolution()
+ dst_width, dst_height = other_script._find_section(SCRIPT_INFO_SECTION).get_resolution()
if all((src_width, src_height, dst_width, dst_height)):
for style in itervalues(self._styles):
style.resample(src_width, src_height, dst_width, dst_height)
|
dst_width , dst_height = other_script . _sections_dict [ SCRIPT_INFO_SECTION ] . get_resolution ( )
|
dst_width , dst_height = other_script . _find_section ( SCRIPT_INFO_SECTION ) . get_resolution ( )
|
SINGLE_STMT
|
[["Insert", ["attribute", 3, 37, 3, 100], ["call", "N0"], 0], ["Move", "N0", ["attribute", 3, 37, 3, 64], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Update", ["identifier:_sections_dict", 3, 50, 3, 64], "_find_section"], ["Insert", "N1", ["(:(", "T"], 0], ["Move", "N1", ["identifier:SCRIPT_INFO_SECTION", 3, 65, 3, 84], 1], ["Insert", "N1", ["):)", "T"], 2], ["Delete", ["[:[", 3, 64, 3, 65]], ["Delete", ["]:]", 3, 84, 3, 85]], ["Delete", ["subscript", 3, 37, 3, 85]]]
|
l10n-switzerland
|
f8a93a57cf5779d11ce1e56a6e810c34c5a7f68d
|
0af5915605337dabca0d0f58fec0e55829ff6e9b
|
l10n_ch/wizard/bvr_import.py
|
https://github.com/CompassionCH/l10n-switzerland
| true
| false
| true
|
@@ -168,7 +168,7 @@ def _import(self, cursor, user, data, context=None):
# line2reconcile = line.id
account_id = line.account_id.id
break
- result = voucher_obj.onchange_partner_id(cursor, user, [], partner_id, journal_id=statement.journal_id.id, price=abs(record['amount']), currency_id= statement.currency.id, ttype='receipt', date=statement.date ,context=context)
+ result = voucher_obj.onchange_partner_id(cursor, user, [], partner_id, journal_id=statement.journal_id.id, price=abs(record['amount']), voucher_currency_id= statement.currency.id, ttype='receipt', date=statement.date ,context=context)
voucher_res = { 'type': 'receipt' ,
'name': values['name'],
|
result = voucher_obj . onchange_partner_id ( cursor , user , [ ] , partner_id , journal_id = statement . journal_id . id , price = abs ( record [ 'amount' ] ) , currency_id = statement . currency . id , ttype = 'receipt' , date = statement . date , context = context )
|
result = voucher_obj . onchange_partner_id ( cursor , user , [ ] , partner_id , journal_id = statement . journal_id . id , price = abs ( record [ 'amount' ] ) , voucher_currency_id = statement . currency . id , ttype = 'receipt' , date = statement . date , context = context )
|
CHANGE_KEYWORD_ARGUMENT_USED
|
[["Update", ["identifier:currency_id", 3, 145, 3, 156], "voucher_currency_id"]]
|
crm_frappe_v8
|
918a388befda02c1234d4ede0c2bbba6ea1b5851
|
a8c96957bb222247303a157ff6523f71b0fa8054
|
webnotes/utils/backups.py
|
https://github.com/indictranstech/crm_frappe_v8
| true
| false
| true
|
@@ -43,7 +43,7 @@ class BackupGenerator:
last_db, last_file = self.get_recent_backup(older_than)
if not (self.backup_path_files and self.backup_path_db):
self.set_backup_file_name()
- if not last_db and last_file:
+ if not last_db and not last_file:
self.take_dump()
if not ignore_files:
self.zip_files()
|
if not last_db and last_file : self . take_dump ( ) if not ignore_files : self . zip_files ( )
|
if not last_db and not last_file : self . take_dump ( ) if not ignore_files : self . zip_files ( )
|
CHANGE_UNARY_OPERATOR
|
[["Insert", ["boolean_operator", 3, 10, 3, 31], ["not_operator", "N0"], 2], ["Insert", "N0", ["not:not", "T"], 0], ["Move", "N0", ["identifier:last_file", 3, 22, 3, 31], 1]]
|
sentry
|
7b18a0a21b39d39b9e0be754b0b09f36396d5892
|
f62b109edb8b5f8cd365a7d9b5adc5a0537b5854
|
src/sentry/utils/redis.py
|
https://github.com/awesome-python/sentry
| true
| false
| true
|
@@ -51,7 +51,7 @@ def make_rb_cluster(*args, **kwargs):
# plugin compatibility but isn't actionable by the system administrator.
import warnings
warnings.warn(
- 'Direct Redis cluster construction is deprecated, please use named clusters. ',
+ 'Direct Redis cluster construction is deprecated, please use named clusters. '
'Direct cluster construction will be removed in Sentry 8.5.',
DeprecationWarning,
)
|
warnings . warn ( 'Direct Redis cluster construction is deprecated, please use named clusters. ' , 'Direct cluster construction will be removed in Sentry 8.5.' , DeprecationWarning , )
|
warnings . warn ( 'Direct Redis cluster construction is deprecated, please use named clusters. ' 'Direct cluster construction will be removed in Sentry 8.5.' , DeprecationWarning , )
|
SINGLE_STMT
|
[["Move", [",:,", 3, 87, 3, 88], ["argument_list", 2, 18, 6, 6], 4], ["Insert", ["argument_list", 2, 18, 6, 6], ["concatenated_string", "N0"], 1], ["Move", "N0", ["string:'Direct Redis cluster construction is deprecated, please use named clusters. '", 3, 9, 3, 87], 0], ["Move", "N0", ["string:'Direct cluster construction will be removed in Sentry 8.5.'", 4, 9, 4, 69], 1], ["Delete", [",:,", 5, 27, 5, 28]]]
|
invenio
|
b922549e556cca2459982def50841a6f6b0d39c3
|
56c4a8d8109f20680c40245e0a60405c57cc587b
|
modules/bibformat/lib/elements/bfe_editors.py
|
https://github.com/valkyriesavage/invenio
| true
| false
| true
|
@@ -33,7 +33,7 @@ def format(bfo, limit, separator=' ; ',extension='[...]', print_links="yes"):
authors = bibrecord.record_get_field_instances(bfo.get_record(), '100')
- editors = [author for author in authors if bibrecord.field_get_subfield_values(author, "e")=="ed." ]
+ editors = [bibrecord.field_get_subfield_values(author, 'a')[0] for author in authors if len(bibrecord.field_get_subfield_values(author, "e")) > 0 and bibrecord.field_get_subfield_values(author, "e")[0]=="ed." ]
if print_links.lower() == "yes":
editors = map(lambda x: '<a href="'+weburl+'/search.py?f=author&p='+ quote(x) +'">'+x+'</a>', editors)
|
editors = [ author for author in authors if bibrecord . field_get_subfield_values ( author , "e" ) == "ed." ]
|
editors = [ bibrecord . field_get_subfield_values ( author , 'a' ) [ 0 ] for author in authors if len ( bibrecord . field_get_subfield_values ( author , "e" ) ) > 0 and bibrecord . field_get_subfield_values ( author , "e" ) [ 0 ] == "ed." ]
|
SINGLE_STMT
|
[["Insert", ["list_comprehension", 3, 15, 3, 105], ["subscript", "N0"], 1], ["Insert", "N0", ["call", "N1"], 0], ["Insert", "N0", ["[:[", "T"], 1], ["Insert", "N0", ["integer:0", "T"], 2], ["Insert", "N0", ["]:]", "T"], 3], ["Insert", ["if_clause", 3, 45, 3, 103], ["boolean_operator", "N2"], 1], ["Insert", "N1", ["attribute", "N3"], 0], ["Insert", "N1", ["argument_list", "N4"], 1], ["Move", "N2", ["comparison_operator", 3, 48, 3, 103], 0], ["Insert", "N2", ["and:and", "T"], 1], ["Insert", "N2", ["comparison_operator", "N5"], 2], ["Insert", "N3", ["identifier:bibrecord", "T"], 0], ["Insert", "N3", [".:.", "T"], 1], ["Insert", "N3", ["identifier:field_get_subfield_values", "T"], 2], ["Insert", "N4", ["(:(", "T"], 0], ["Move", "N4", ["identifier:author", 3, 16, 3, 22], 1], ["Insert", "N4", [",:,", "T"], 2], ["Insert", "N4", ["string:'a'", "T"], 3], ["Insert", "N4", ["):)", "T"], 4], ["Insert", ["comparison_operator", 3, 48, 3, 103], ["call", "N6"], 0], ["Insert", ["comparison_operator", 3, 48, 3, 103], [">:>", "T"], 1], ["Insert", ["comparison_operator", 3, 48, 3, 103], ["integer:0", "T"], 2], ["Insert", "N5", ["subscript", "N7"], 0], ["Move", "N5", ["==:==", 3, 96, 3, 98], 1], ["Move", "N5", ["string:\"ed.\"", 3, 98, 3, 103], 2], ["Insert", "N6", ["identifier:len", "T"], 0], ["Insert", "N6", ["argument_list", "N8"], 1], ["Insert", "N7", ["call", "N9"], 0], ["Insert", "N7", ["[:[", "T"], 1], ["Insert", "N7", ["integer:0", "T"], 2], ["Insert", "N7", ["]:]", "T"], 3], ["Insert", "N8", ["(:(", "T"], 0], ["Move", "N8", ["call", 3, 48, 3, 96], 1], ["Insert", "N8", ["):)", "T"], 2], ["Insert", "N9", ["attribute", "N10"], 0], ["Insert", "N9", ["argument_list", "N11"], 1], ["Insert", "N10", ["identifier:bibrecord", "T"], 0], ["Insert", "N10", [".:.", "T"], 1], ["Insert", "N10", ["identifier:field_get_subfield_values", "T"], 2], ["Insert", "N11", ["(:(", "T"], 0], ["Insert", "N11", ["identifier:author", "T"], 1], ["Insert", "N11", [",:,", "T"], 2], ["Insert", "N11", ["string:\"e\"", "T"], 3], ["Insert", "N11", ["):)", "T"], 4]]
|
pontoon
|
2c9744b8b840077f416b1557b6b868a82f39f236
|
350d5fcfc7be4aa030508728e290b367e8ad42b4
|
pontoon/administration/utils/files.py
|
https://github.com/Bostonncity/pontoon
| true
| false
| true
|
@@ -153,7 +153,7 @@ def get_locale_directory(project, locale):
'path': path,
}
- log.debug("Locale repository path not found.")
+ log.error("Locale repository path not found.")
def detect_format(path):
|
log . debug ( "Locale repository path not found." )
|
log . error ( "Locale repository path not found." )
|
WRONG_FUNCTION_NAME
|
[["Update", ["identifier:debug", 3, 9, 3, 14], "error"]]
|
Hospital-bench
|
262d05e4d856357ab47c52ee02cb45ba07d2b203
|
b3071162c86b89deb7f37e5ef3236a1b8610d8fe
|
playbooks/install.py
|
https://github.com/indictranstech/Hospital-bench
| true
| false
| true
|
@@ -96,7 +96,7 @@ def clone_bench_repo():
})
success = run_os_command(
- {'git': 'git clone https://github.com/frappe/bench {bench_repo} --depth 1 --branch new-install'.format(bench_repo=bench_repo)}
+ {'git': 'git clone https://github.com/frappe/bench {bench_repo} --depth 1'.format(bench_repo=bench_repo)}
)
return success
|
success = run_os_command ( { 'git' : 'git clone https://github.com/frappe/bench {bench_repo} --depth 1 --branch new-install' . format ( bench_repo = bench_repo ) } )
|
success = run_os_command ( { 'git' : 'git clone https://github.com/frappe/bench {bench_repo} --depth 1' . format ( bench_repo = bench_repo ) } )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'git clone https://github.com/frappe/bench {bench_repo} --depth 1 --branch new-install'", 3, 11, 3, 98], "'git clone https://github.com/frappe/bench {bench_repo} --depth 1'"]]
|
knowledge-repo
|
ebfa83fcb2512149332a7efee7db9833dda865be
|
1c8c5ad11a8a3aee76ca54fb54f352765eaea9b6
|
knowledge_repo/app/routes/web_editor.py
|
https://github.com/undeadinu/knowledge-repo
| true
| false
| true
|
@@ -274,7 +274,7 @@ def save_post():
headers['created_at'] = datetime.strptime(data['created_at'], '%Y-%m-%d')
headers['updated_at'] = datetime.strptime(data['updated_at'], '%Y-%m-%d')
headers['title'] = str(data['title'])
- headers['path'] = post.path
+ headers['path'] = str(post.path)
headers['project'] = str(data['project'])
# TODO: thumbnail header not working currently, as feed image set with kp
# method not based on header
|
headers [ 'path' ] = post . path
|
headers [ 'path' ] = str ( post . path )
|
ADD_FUNCTION_AROUND_EXPRESSION
|
[["Insert", ["assignment", 3, 5, 3, 32], ["call", "N0"], 2], ["Insert", "N0", ["identifier:str", "T"], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Insert", "N1", ["(:(", "T"], 0], ["Move", "N1", ["attribute", 3, 23, 3, 32], 1], ["Insert", "N1", ["):)", "T"], 2]]
|
connector-magento
|
90c346cf5422cd745973ba1e04f336144ae50a2d
|
9ee2c43730369afc4ab791a475907dd9f789e32c
|
magentoerpconnect/unit/import_synchronizer.py
|
https://github.com/SportPursuit/connector-magento
| true
| false
| true
|
@@ -393,11 +393,11 @@ class TranslationImporter(ImportSynchronizer):
context = session.context.copy()
context['lang'] = storeview.lang_id.code
- session.pool.get('magento.product.category').write(session.cr,
- session.uid,
- openerp_id,
- data,
- context=context)
+ self.model.write(session.cr,
+ session.uid,
+ openerp_id,
+ data,
+ context=context)
@magento
|
session . pool . get ( 'magento.product.category' ) . write ( session . cr , session . uid , openerp_id , data , context = context )
|
self . model . write ( session . cr , session . uid , openerp_id , data , context = context )
|
SINGLE_STMT
|
[["Move", ["attribute", 3, 13, 3, 63], ["attribute", 3, 13, 3, 25], 0], ["Move", ["attribute", 3, 13, 3, 63], [".:.", 3, 25, 3, 26], 1], ["Update", ["identifier:session", 3, 13, 3, 20], "self"], ["Update", ["identifier:pool", 3, 21, 3, 25], "model"], ["Delete", ["identifier:get", 3, 26, 3, 29]], ["Delete", ["attribute", 3, 13, 3, 29]], ["Delete", ["(:(", 3, 29, 3, 30]], ["Delete", ["string:'magento.product.category'", 3, 30, 3, 56]], ["Delete", ["):)", 3, 56, 3, 57]], ["Delete", ["argument_list", 3, 29, 3, 57]], ["Delete", ["call", 3, 13, 3, 57]], ["Delete", [".:.", 3, 57, 3, 58]]]
|
textacy
|
8f98b76ddfa365b9787f43e33a50668bb0e50ddc
|
fe2e7306705fb9e1ae90e0a9e86626bad3f5fdcb
|
textacy/text_stats.py
|
https://github.com/joshlk/textacy
| true
| false
| true
|
@@ -31,7 +31,7 @@ def readability_stats(doc):
n_sents = doc.n_sents
- words = doc.words(filter_punct=True)
+ words = doc.words(filter_punct=True, filter_stops=False, filter_nums=False)
n_words = len(words)
n_unique_words = len({word.lower for word in words})
n_chars = sum(len(word) for word in words)
|
words = doc . words ( filter_punct = True )
|
words = doc . words ( filter_punct = True , filter_stops = False , filter_nums = False )
|
SAME_FUNCTION_MORE_ARGS
|
[["Insert", ["argument_list", 3, 22, 3, 41], [",:,", "T"], 2], ["Insert", ["argument_list", 3, 22, 3, 41], ["keyword_argument", "N0"], 3], ["Insert", ["argument_list", 3, 22, 3, 41], [",:,", "T"], 4], ["Insert", ["argument_list", 3, 22, 3, 41], ["keyword_argument", "N1"], 5], ["Insert", "N0", ["identifier:filter_stops", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["false:False", "T"], 2], ["Insert", "N1", ["identifier:filter_nums", "T"], 0], ["Insert", "N1", ["=:=", "T"], 1], ["Insert", "N1", ["false:False", "T"], 2]]
|
KomodoEdit
|
faabecb67f005d06f662fd3b3a4ee54b2f99eb72
|
d7f422ff729f0a5af55f98cdb39087a89ce9e030
|
bklocal.py
|
https://github.com/benpope82/KomodoEdit
| true
| false
| true
|
@@ -52,7 +52,7 @@ def _getLinuxDistro():
if os.path.exists(redhatRelease):
content = open(redhatRelease).read()
pattern = re.compile("^Red Hat Linux release ([\d\.]+)")
- fedoraPattern = re.compile("^Fedora (Core )?release ([\d\.]+)")
+ fedoraPattern = re.compile("^Fedora (?:Core )?release ([\d\.]+)")
match = pattern.search(content)
fedoraMatch = fedoraPattern.search(content)
if match:
|
fedoraPattern = re . compile ( "^Fedora (Core )?release ([\d\.]+)" )
|
fedoraPattern = re . compile ( "^Fedora (?:Core )?release ([\d\.]+)" )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:\"^Fedora (Core )?release ([\\d\\.]+)\"", 3, 36, 3, 71], "\"^Fedora (?:Core )?release ([\\d\\.]+)\""]]
|
ilastik
|
82635830a7e4f0835c01b8fe997d3601017e770e
|
50ce2bcf18b82c182667ba8291d2a6c556ce16e3
|
ilastik/applets/objectClassification/opObjectClassification.py
|
https://github.com/jakirkham/ilastik
| true
| false
| true
|
@@ -385,7 +385,7 @@ class OpToImage(Operator):
newTmap[:len(tmap)] = tmap[:]
tmap = newTmap
- img[t] = tmap[img[t]]
+ img[t-roi.start[0]] = tmap[img[t-roi.start[0]]]
return img
|
img [ t ] = tmap [ img [ t ] ]
|
img [ t - roi . start [ 0 ] ] = tmap [ img [ t - roi . start [ 0 ] ] ]
|
SINGLE_STMT
|
[["Insert", ["subscript", 3, 13, 3, 19], ["binary_operator", "N0"], 2], ["Move", ["subscript", 3, 13, 3, 19], ["]:]", 3, 32, 3, 33], 3], ["Insert", ["subscript", 3, 22, 3, 34], ["[:[", "T"], 1], ["Move", "N0", ["identifier:t", 3, 17, 3, 18], 0], ["Insert", "N0", ["-:-", "T"], 1], ["Insert", "N0", ["subscript", "N1"], 2], ["Insert", ["subscript", 3, 27, 3, 33], ["binary_operator", "N2"], 2], ["Insert", ["subscript", 3, 27, 3, 33], ["]:]", "T"], 3], ["Insert", "N1", ["attribute", "N3"], 0], ["Move", "N1", ["[:[", 3, 26, 3, 27], 1], ["Insert", "N1", ["integer:0", "T"], 2], ["Move", "N1", ["]:]", 3, 18, 3, 19], 3], ["Move", "N2", ["identifier:t", 3, 31, 3, 32], 0], ["Insert", "N2", ["-:-", "T"], 1], ["Insert", "N2", ["subscript", "N4"], 2], ["Insert", "N3", ["identifier:roi", "T"], 0], ["Insert", "N3", [".:.", "T"], 1], ["Insert", "N3", ["identifier:start", "T"], 2], ["Insert", "N4", ["attribute", "N5"], 0], ["Insert", "N4", ["[:[", "T"], 1], ["Insert", "N4", ["integer:0", "T"], 2], ["Insert", "N4", ["]:]", "T"], 3], ["Insert", "N5", ["identifier:roi", "T"], 0], ["Insert", "N5", [".:.", "T"], 1], ["Insert", "N5", ["identifier:start", "T"], 2]]
|
Uranium
|
45bd259acf2e7f520dda285a568e54eb018fddfb
|
c91b76ac2f0723ef99a074312d52a8fd6015c841
|
plugins/FileHandlers/STLReader/STLReader.py
|
https://github.com/Ultimaker/Uranium
| true
| false
| true
|
@@ -95,7 +95,7 @@ class STLReader(MeshReader):
array[:, [frm, to]] = array[:, [to, frm]]
def _loadWithNumpySTL(self, file_name, mesh_builder):
- for loaded_data in stl.mesh.Mesh.from_multi_file(file_name):
+ for loaded_data in stl.mesh.Mesh.from_multi_file(file_name, mode=stl.stl.Mode.AUTOMATIC):
vertices = numpy.resize(loaded_data.points.flatten(), (int(loaded_data.points.size / 3), 3))
# Invert values of second column
|
for loaded_data in stl . mesh . Mesh . from_multi_file ( file_name ) : vertices = numpy . resize ( loaded_data . points . flatten ( ) , ( int ( loaded_data . points . size / 3 ) , 3 ) )
|
for loaded_data in stl . mesh . Mesh . from_multi_file ( file_name , mode = stl . stl . Mode . AUTOMATIC ) : vertices = numpy . resize ( loaded_data . points . flatten ( ) , ( int ( loaded_data . points . size / 3 ) , 3 ) )
|
SAME_FUNCTION_MORE_ARGS
|
[["Insert", ["argument_list", 3, 57, 3, 68], [",:,", "T"], 2], ["Insert", ["argument_list", 3, 57, 3, 68], ["keyword_argument", "N0"], 3], ["Insert", "N0", ["identifier:mode", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["attribute", "N1"], 2], ["Insert", "N1", ["attribute", "N2"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:AUTOMATIC", "T"], 2], ["Insert", "N2", ["attribute", "N3"], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:Mode", "T"], 2], ["Insert", "N3", ["identifier:stl", "T"], 0], ["Insert", "N3", [".:.", "T"], 1], ["Insert", "N3", ["identifier:stl", "T"], 2]]
|
sqlmapproject
|
72cf9041bf4f721b2ec0f72e077da66bbce9c6bf
|
90329a8b01ecc77677d89c4a20731e1bc0e8d0dd
|
plugins/generic/databases.py
|
https://github.com/y1ngz1/sqlmapproject
| true
| false
| true
|
@@ -805,7 +805,7 @@ class Databases:
elif "." in conf.tbl:
if not conf.db:
- conf.db, conf.tbl = conf.tbl.split(".")
+ conf.db, conf.tbl = conf.tbl.split('.', 1)
if conf.tbl is not None and conf.db is None and Backend.getIdentifiedDbms() not in (DBMS.SQLITE, DBMS.ACCESS, DBMS.FIREBIRD):
warnMsg = "missing database parameter. sqlmap is going to "
|
conf . db , conf . tbl = conf . tbl . split ( "." )
|
conf . db , conf . tbl = conf . tbl . split ( '.' , 1 )
|
SINGLE_STMT
|
[["Update", ["string:\".\"", 3, 52, 3, 55], "'.'"], ["Insert", ["argument_list", 3, 51, 3, 56], [",:,", "T"], 2], ["Insert", ["argument_list", 3, 51, 3, 56], ["integer:1", "T"], 3]]
|
epsg.io
|
ad2f092f6fc41d781a4ebf8f485bdbb4b8e5cf58
|
fca5c2c11074e4bf7be8e6202d55e8057fdca644
|
app.py
|
https://github.com/klokantech/epsg.io
| true
| false
| true
|
@@ -474,7 +474,7 @@ def index(id):
export = {}
export['prettywkt'] = ref.ExportToPrettyWkt()
- if int(try_code) == 0:
+ if int(error_code) == 0:
export['usgs'] = str(ref.ExportToUSGS())
export['ogcwkt'] = ref.ExportToWkt()
export['proj4'] = ref.ExportToProj4()
|
if int ( try_code ) == 0 : export [ 'usgs' ] = str ( ref . ExportToUSGS ( ) ) export [ 'ogcwkt' ] = ref . ExportToWkt ( ) export [ 'proj4' ] = ref . ExportToProj4 ( )
|
if int ( error_code ) == 0 : export [ 'usgs' ] = str ( ref . ExportToUSGS ( ) ) export [ 'ogcwkt' ] = ref . ExportToWkt ( ) export [ 'proj4' ] = ref . ExportToProj4 ( )
|
CHANGE_IDENTIFIER_USED
|
[["Update", ["identifier:try_code", 3, 14, 3, 22], "error_code"]]
|
maven-repository-builder
|
8c35848c46e8a5ed3c3178f1cb77484cfd030f9d
|
fc45db74c30c41765f8e877a023622af7e7a71dc
|
artifact_list_builder.py
|
https://github.com/jboss-eap/maven-repository-builder
| true
| false
| true
|
@@ -341,7 +341,7 @@ class ArtifactListBuilder:
d.setdefault(extension, set()).update(classifiers)
def _getArtifactVersionREString(self, artifactId, version):
- if version == "SNAPSHOT":
+ if version[-9:] == "-SNAPSHOT":
# """Prepares the version string to be part of regular expression for filename and when the
# version is a snapshot version, it corrects the suffix to match even when the files are
# named with the timestamp and build number as usual in case of snapshot versions."""
|
if version == "SNAPSHOT" :
|
if version [ - 9 : ] == "-SNAPSHOT" :
|
SINGLE_STMT
|
[["Insert", ["comparison_operator", 3, 12, 3, 33], ["subscript", "N0"], 0], ["Update", ["string:\"SNAPSHOT\"", 3, 23, 3, 33], "\"-SNAPSHOT\""], ["Move", "N0", ["identifier:version", 3, 12, 3, 19], 0], ["Insert", "N0", ["[:[", "T"], 1], ["Insert", "N0", ["slice", "N1"], 2], ["Insert", "N0", ["]:]", "T"], 3], ["Insert", "N1", ["unary_operator", "N2"], 0], ["Insert", "N1", [":::", "T"], 1], ["Insert", "N2", ["-:-", "T"], 0], ["Insert", "N2", ["integer:9", "T"], 1]]
|
aquilon
|
92d2ff8a16b7419123a848f98ec8594694c5203a
|
5c681e94cfa4490b2ba9742ffad409511c06dc06
|
lib/python2.6/aquilon/client/depends.py
|
https://github.com/quattor/aquilon
| true
| false
| false
|
@@ -31,4 +31,4 @@ if sys.platform == "sunos5":
ms.version.addpkg("ctypes", "1.0.2")
# required to move the ctypes path before the core paths
- sys.path[0] = sys.path.pop()
+ sys.path.insert(0, sys.path.pop())
|
sys . path [ 0 ] = sys . path . pop ( )
|
sys . path . insert ( 0 , sys . path . pop ( ) )
|
SINGLE_STMT
|
[["Insert", ["expression_statement", 3, 5, 3, 33], ["call", "N0"], 0], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Move", "N1", ["attribute", 3, 5, 3, 13], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:insert", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Move", "N2", ["integer:0", 3, 14, 3, 15], 1], ["Insert", "N2", [",:,", "T"], 2], ["Move", "N2", ["call", 3, 19, 3, 33], 3], ["Insert", "N2", ["):)", "T"], 4], ["Delete", ["[:[", 3, 13, 3, 14]], ["Delete", ["]:]", 3, 15, 3, 16]], ["Delete", ["subscript", 3, 5, 3, 16]], ["Delete", ["=:=", 3, 17, 3, 18]], ["Delete", ["assignment", 3, 5, 3, 33]]]
|
oq-engine
|
c6c48c9b872928e1989b96f811c1ca5d28adffc7
|
8f1715ea92dcdd5f8aece4943d1d9136fcf5c3ac
|
openquake/hazardlib/tests/calc/stochastic_test.py
|
https://github.com/gem/oq-engine
| true
| false
| true
|
@@ -119,6 +119,6 @@ class StochasticEventSetTestCase(unittest.TestCase):
gsims = [SiMidorikawa1999SInter()]
dic = sample_ruptures(group, s_filter, gsims, param)
self.assertEqual(dic['num_ruptures'], 19) # total ruptures
- self.assertEqual(dic['num_events'], 13)
+ self.assertEqual(dic['num_events'], 16)
self.assertEqual(len(dic['eb_ruptures']), 8)
self.assertEqual(len(dic['calc_times']), 15) # mutex sources
|
self . assertEqual ( dic [ 'num_events' ] , 13 )
|
self . assertEqual ( dic [ 'num_events' ] , 16 )
|
CHANGE_NUMERIC_LITERAL
|
[["Update", ["integer:13", 3, 45, 3, 47], "16"]]
|
sunpy
|
d665ded83cf758ca63d851da58c8a68f78af8f75
|
6893002c7a6e8bec50aab75f41d441cba7dc829a
|
setup.py
|
https://github.com/openspaceagency/sunpy
| true
| false
| true
|
@@ -43,7 +43,7 @@ def install(setup): #pylint: disable=W0621
'scipy',
'suds',
'pandas',
- 'pyqt4',
+ 'PyQt4',
'matplotlib>=1.0'
],
license="BSD",
|
'pyqt4' ,
|
'PyQt4' ,
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'pyqt4'", 3, 13, 3, 20], "'PyQt4'"]]
|
setuptools
|
34910765fbafb53bec6604b730875d010a863ae2
|
5a9aed6e210628d16cd446c163fa50c9841dba34
|
setuptools/command/test.py
|
https://github.com/gaborbernat/setuptools
| true
| false
| true
|
@@ -138,7 +138,7 @@ class test(Command):
if self.distribution.tests_require:
self.distribution.fetch_build_eggs(self.distribution.tests_require)
- if self.test_suite:
+ if True:
cmd = ' '.join(self._argv)
if self.dry_run:
self.announce('skipping "%s" (dry run)' % cmd)
|
if self . test_suite : cmd = ' ' . join ( self . _argv ) if self . dry_run : self . announce ( 'skipping "%s" (dry run)' % cmd )
|
if True : cmd = ' ' . join ( self . _argv ) if self . dry_run : self . announce ( 'skipping "%s" (dry run)' % cmd )
|
SINGLE_STMT
|
[["Insert", ["if_statement", 3, 9, 6, 63], ["true:True", "T"], 1], ["Delete", ["identifier:self", 3, 12, 3, 16]], ["Delete", [".:.", 3, 16, 3, 17]], ["Delete", ["identifier:test_suite", 3, 17, 3, 27]], ["Delete", ["attribute", 3, 12, 3, 27]]]
|
olympia
|
a461367b36748223c249071e1763d7f3af12efb6
|
319c8d4feaad6d2cada45111f16fc5c5fdaa75dd
|
mkt/reviewers/views_themes.py
|
https://github.com/thefourtheye/olympia
| true
| false
| true
|
@@ -341,7 +341,7 @@ def themes_single(request, slug):
'theme_count': 1,
'rereview': rereview,
'reviewable': reviewable,
- 'reject_reasons': rvw.THEME_REJECT_REASONS.items(),
+ 'reject_reasons': rvw.THEME_REJECT_REASONS,
'action_dict': rvw.REVIEW_ACTIONS,
}))
|
'reject_reasons' : rvw . THEME_REJECT_REASONS . items ( ) ,
|
'reject_reasons' : rvw . THEME_REJECT_REASONS ,
|
SINGLE_STMT
|
[["Move", ["expression_statement", 3, 9, 3, 60], ["attribute", 3, 9, 3, 51], 0], ["Delete", [".:.", 3, 51, 3, 52]], ["Delete", ["identifier:items", 3, 52, 3, 57]], ["Delete", ["attribute", 3, 9, 3, 57]], ["Delete", ["(:(", 3, 57, 3, 58]], ["Delete", ["):)", 3, 58, 3, 59]], ["Delete", ["argument_list", 3, 57, 3, 59]], ["Delete", ["call", 3, 9, 3, 59]]]
|
erpnext
|
f8cc1a6db60203a6a8502b41a3e60a7f661739d9
|
c81cf721a825e994183bd010775708b2fde2de6c
|
patches/march_2013/p03_rename_blog_to_blog_post.py
|
https://github.com/arrayest/erpnext
| true
| false
| true
|
@@ -3,7 +3,7 @@ import webnotes
def execute():
webnotes.reload_doc('website', 'doctype', 'blogger')
webnotes.rename_doc("DocType", "Blog", "Blog Post", force=True)
- webnotes.conn.reload_doc('website', 'doctype', 'blog_post')
+ webnotes.reload_doc('website', 'doctype', 'blog_post')
webnotes.conn.sql('''update tabBlogger set posts=(select count(*)
from `tabBlog Post` where ifnull(blogger,"")=tabBlogger.name)''')
webnotes.conn.sql("""update `tabBlog Post` set published_on=creation""")
|
webnotes . conn . reload_doc ( 'website' , 'doctype' , 'blog_post' )
|
webnotes . reload_doc ( 'website' , 'doctype' , 'blog_post' )
|
SINGLE_STMT
|
[["Delete", ["identifier:conn", 3, 11, 3, 15]], ["Delete", [".:.", 3, 15, 3, 16]]]
|
prettychart
|
3efedb8a6b94d5ef91c895dd5dca82b1c16b7434
|
0e56ae1da29615fdb91d58535a862ecc0e375041
|
website/addons/github/model.py
|
https://github.com/bdyetton/prettychart
| true
| false
| true
|
@@ -448,7 +448,7 @@ class AddonGitHubNodeSettings(AddonNodeSettingsBase):
try:
response = connection.delete_hook(self.user, self.repo, self.hook_id)
except NotFoundError:
- pass
+ return False
if response:
self.hook_id = None
if save:
|
pass
|
return False
|
SINGLE_STMT
|
[["Insert", ["block", 3, 17, 3, 21], ["return_statement", "N0"], 0], ["Insert", "N0", ["return:return", "T"], 0], ["Insert", "N0", ["false:False", "T"], 1], ["Delete", ["pass:pass", 3, 17, 3, 21]], ["Delete", ["pass_statement", 3, 17, 3, 21]]]
|
beaker_extensions
|
8af0f1128a9c357542fd087e3cb3336f35bf1f7f
|
3c1c72b133b622a09f4b0fc4765f4f6a9612fa8a
|
beaker_extensions/nosql.py
|
https://github.com/YuanrongZheng/beaker_extensions
| true
| false
| true
|
@@ -23,7 +23,7 @@ class NoSqlManager(NamespaceManager):
self.lock_dir = lock_dir
elif data_dir:
self.lock_dir = data_dir + "/container_tcd_lock"
- if self.lock_dir:
+ if hasattr(self, 'lock_dir'):
verify_directory(self.lock_dir)
conn_params = {}
|
if self . lock_dir : verify_directory ( self . lock_dir )
|
if hasattr ( self , 'lock_dir' ) : verify_directory ( self . lock_dir )
|
SINGLE_STMT
|
[["Insert", ["if_statement", 3, 9, 4, 44], ["call", "N0"], 1], ["Insert", "N0", ["identifier:hasattr", "T"], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Insert", "N1", ["(:(", "T"], 0], ["Move", "N1", ["identifier:self", 3, 12, 3, 16], 1], ["Insert", "N1", [",:,", "T"], 2], ["Insert", "N1", ["string:'lock_dir'", "T"], 3], ["Insert", "N1", ["):)", "T"], 4], ["Delete", [".:.", 3, 16, 3, 17]], ["Delete", ["identifier:lock_dir", 3, 17, 3, 25]], ["Delete", ["attribute", 3, 12, 3, 25]]]
|
For-Discord
|
422a7f6063a9a9831cdb21fa3860d3b65682cea6
|
974155f2bcd91ea3eae222990d96e3d4d5879e67
|
beetsplug/replaygain.py
|
https://github.com/AmazingDude/For-Discord
| true
| false
| true
|
@@ -628,7 +628,7 @@ class ReplayGainPlugin(BeetsPlugin):
try:
self.backend_instance = self.backends[backend_name](
- self.config
+ self.config, self._log
)
except (ReplayGainError, FatalReplayGainError) as e:
raise ui.UserError(
|
self . backend_instance = self . backends [ backend_name ] ( self . config )
|
self . backend_instance = self . backends [ backend_name ] ( self . config , self . _log )
|
SAME_FUNCTION_MORE_ARGS
|
[["Insert", ["argument_list", 2, 64, 4, 14], [",:,", "T"], 2], ["Insert", ["argument_list", 2, 64, 4, 14], ["attribute", "N0"], 3], ["Insert", "N0", ["identifier:self", "T"], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:_log", "T"], 2]]
|
nipype
|
9769e1bd2cc51e2ac29c1f132d0473e7ef294b59
|
e3fdae714a4ff35bcfeeadab73574da9cacc8db2
|
nipype/algorithms/mesh.py
|
https://github.com/kaczmarj/nipype
| true
| false
| false
|
@@ -44,7 +44,7 @@ class WarpPoints(BaseInterface):
-------
>>> from nipype.algorithms.mesh import WarpPoints
- >>> wp = mesh.P2PDistance()
+ >>> wp = WarpPoints()
>>> wp.inputs.points = 'surf1.vtk'
>>> wp.inputs.warp = 'warpfield.nii'
>>> res = wp.run() # doctest: +SKIP
|
wp = mesh . P2PDistance ( )
|
wp = WarpPoints ( )
|
SINGLE_STMT
|
[["Update", ["identifier:mesh", 3, 14, 3, 18], "WarpPoints"], ["Move", ["call", 3, 14, 3, 32], ["identifier:mesh", 3, 14, 3, 18], 0], ["Delete", [".:.", 3, 18, 3, 19]], ["Delete", ["identifier:P2PDistance", 3, 19, 3, 30]], ["Delete", ["attribute", 3, 14, 3, 30]]]
|
tahoe-lafs
|
9f0ee4b7586e0397175b7dbf30d1ad655e356dc7
|
168a8c3b73fd4dfae0e4a7bc463b3f00ab3227c0
|
src/allmydata/test/test_system.py
|
https://github.com/LeastAuthority/tahoe-lafs
| true
| false
| false
|
@@ -13,7 +13,7 @@ from allmydata.util import deferredutil, fileutil, idlib, mathutil, testutil
from allmydata.scripts import runner
from allmydata.interfaces import IDirectoryNode, IFileNode, IFileURI
from allmydata.mutable import NotMutableError
-from foolscap.eventual import fireEventually, flushEventualQueue
+from foolscap.eventual import flushEventualQueue
from foolscap import DeadReferenceError
from twisted.python import log
from twisted.python.failure import Failure
|
from foolscap . eventual import fireEventually , flushEventualQueue
|
from foolscap . eventual import flushEventualQueue
|
SINGLE_STMT
|
[["Delete", ["identifier:fireEventually", 3, 31, 3, 45]], ["Delete", ["dotted_name", 3, 31, 3, 45]], ["Delete", [",:,", 3, 45, 3, 46]]]
|
matplotlib
|
75f6f9654ae60e7322e8d39f67a7322a31867e58
|
63c6868de962e6d00ae2b7129fcb6bad7d07e40f
|
examples/animation/animation_blit_qt4.py
|
https://github.com/Zsailer/matplotlib
| true
| false
| true
|
@@ -46,7 +46,7 @@ class BlitQT(FigureCanvas):
self.draw()
self.ax_background = self.copy_from_bbox(self.ax.bbox)
- self.restore_region(self.ax_background, bbox=self.ax.bbox)
+ self.restore_region(self.ax_background)
# update the data
self.sin_line.set_ydata(np.sin(self.x+self.cnt/10.0))
|
self . restore_region ( self . ax_background , bbox = self . ax . bbox )
|
self . restore_region ( self . ax_background )
|
SAME_FUNCTION_LESS_ARGS
|
[["Delete", [",:,", 3, 47, 3, 48]], ["Delete", ["identifier:bbox", 3, 49, 3, 53]], ["Delete", ["=:=", 3, 53, 3, 54]], ["Delete", ["identifier:self", 3, 54, 3, 58]], ["Delete", [".:.", 3, 58, 3, 59]], ["Delete", ["identifier:ax", 3, 59, 3, 61]], ["Delete", ["attribute", 3, 54, 3, 61]], ["Delete", [".:.", 3, 61, 3, 62]], ["Delete", ["identifier:bbox", 3, 62, 3, 66]], ["Delete", ["attribute", 3, 54, 3, 66]], ["Delete", ["keyword_argument", 3, 49, 3, 66]]]
|
freenas
|
e5a7fef19f4e616743d3be2da346c19de135a903
|
ec350883949da1fbc0c41fd1dbcb59130e26fd1d
|
src/middlewared/middlewared/plugins/crypto.py
|
https://github.com/yaplej/freenas
| true
| false
| true
|
@@ -741,7 +741,7 @@ class CertificateService(CRUDService):
))
if not os.path.exists(root_path):
- os.mkdir(root_path, 0o755)
+ os.makedirs(root_path, 0o755, exist_ok=True)
def cert_issuer(cert):
issuer = None
|
os . mkdir ( root_path , 0o755 )
|
os . makedirs ( root_path , 0o755 , exist_ok = True )
|
SINGLE_STMT
|
[["Update", ["identifier:mkdir", 3, 16, 3, 21], "makedirs"], ["Insert", ["argument_list", 3, 21, 3, 39], [",:,", "T"], 4], ["Insert", ["argument_list", 3, 21, 3, 39], ["keyword_argument", "N0"], 5], ["Insert", "N0", ["identifier:exist_ok", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["true:True", "T"], 2]]
|
cleverhans
|
562ef62797169f09782619be9d20cecbef5cb983
|
bd74247020ed57ae34f6fef55215facb8669bbbc
|
examples/ex_cifar10_tf.py
|
https://github.com/ftramer/cleverhans
| true
| false
| false
|
@@ -22,7 +22,7 @@ flags.DEFINE_string('train_dir', '/tmp', 'Directory storing the saved model.')
flags.DEFINE_string('filename', 'cifar10.ckpt', 'Filename to save model under.')
flags.DEFINE_integer('nb_epochs', 10, 'Number of epochs to train model')
flags.DEFINE_integer('batch_size', 128, 'Size of training batches')
-flags.DEFINE_float('learning_rate', 0.1, 'Learning rate for training')
+flags.DEFINE_float('learning_rate', 0.001, 'Learning rate for training')
def data_cifar10():
|
flags . DEFINE_float ( 'learning_rate' , 0.1 , 'Learning rate for training' )
|
flags . DEFINE_float ( 'learning_rate' , 0.001 , 'Learning rate for training' )
|
CHANGE_NUMERIC_LITERAL
|
[["Update", ["float:0.1", 3, 37, 3, 40], "0.001"]]
|
l10n-brazil
|
25540db5bc35d4763efa6b1da7d423f4b6d3a70f
|
c94c486760aff39c48b2c3b6c00cb39e763e9f73
|
finan/reports/finan_relatorio_fluxo_caixa.py
|
https://github.com/thinkopensolutions/l10n-brazil
| true
| false
| true
|
@@ -103,7 +103,7 @@ class FinanRelatorioFluxoCaixa(ReportXlsxBase):
titulo = data_atual.strftime('%d/%m/%Y')
data_periodo = 'valor_' + str(data_atual).replace('-', '_')
report_data['titulo_data_periodo'][data_periodo] = titulo
- data_atual += relativedelta(dias=1)
+ data_atual += relativedelta(days=1)
elif self.report_wizard.periodo == 'semanas':
data_atual = data_inicial
|
data_atual += relativedelta ( dias = 1 )
|
data_atual += relativedelta ( days = 1 )
|
CHANGE_KEYWORD_ARGUMENT_USED
|
[["Update", ["identifier:dias", 3, 45, 3, 49], "days"]]
|
zipline
|
b907b2557ef952f6b31ce6d0f3962a179d024669
|
c8e58f33625de8aae68cd333b7ff5d208b2bb30c
|
zipline/testing/fixtures.py
|
https://github.com/sushain/zipline
| true
| false
| true
|
@@ -720,7 +720,7 @@ class WithEquityDailyBarData(WithTradingEnvironment):
else:
return create_daily_bar_data(
cls.equity_daily_bar_days,
- cls.asset_finder.sids,
+ cls.asset_finder.equities_sids,
)
@classmethod
|
create_daily_bar_data ( cls . equity_daily_bar_days , cls . asset_finder . sids , )
|
create_daily_bar_data ( cls . equity_daily_bar_days , cls . asset_finder . equities_sids , )
|
CHANGE_ATTRIBUTE_USED
|
[["Update", ["identifier:sids", 3, 34, 3, 38], "equities_sids"]]
|
pygraphistry
|
6603d277eb84712a907008bad8bc8156ed9bdf40
|
afeb15c8ee9c46b71345d500caf3c59cf85ff6f1
|
setup.py
|
https://github.com/barseghyanartur/pygraphistry
| true
| false
| false
|
@@ -34,7 +34,7 @@ setup(
'Intended Audience :: Graphistry User',
'Topic :: Data Visualization Development :: Load Tools',
- 'License :: OSI Approved :: MIT License',
+ 'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
|
'License :: OSI Approved :: MIT License' ,
|
'License :: OSI Approved :: BSD License' ,
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'License :: OSI Approved :: MIT License'", 3, 9, 3, 49], "'License :: OSI Approved :: BSD License'"]]
|
incubator-superset
|
6fe93e18c79eeab470f0297014ad9453552e31aa
|
c988080990691eceb7a78e6448400a519ba4bb0b
|
superset/connectors/sqla/models.py
|
https://github.com/neilchencn/incubator-superset
| true
| false
| true
|
@@ -598,7 +598,7 @@ class SqlaTable(Model, BaseDatasource):
db_dialect = self.database.get_sqla_engine().dialect
for col in table.columns:
try:
- datatype = "{}".format(col.type).upper()
+ datatype = "{}".format(col.type.compile(dialect=db_dialect)).upper()
except Exception as e:
datatype = "UNKNOWN"
logging.error(
|
datatype = "{}" . format ( col . type ) . upper ( )
|
datatype = "{}" . format ( col . type . compile ( dialect = db_dialect ) ) . upper ( )
|
ADD_METHOD_CALL
|
[["Insert", ["argument_list", 3, 39, 3, 49], ["call", "N0"], 1], ["Insert", ["argument_list", 3, 39, 3, 49], ["):)", "T"], 2], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Move", "N1", ["attribute", 3, 40, 3, 48], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:compile", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["keyword_argument", "N3"], 1], ["Move", "N2", ["):)", 3, 48, 3, 49], 2], ["Insert", "N3", ["identifier:dialect", "T"], 0], ["Insert", "N3", ["=:=", "T"], 1], ["Insert", "N3", ["identifier:db_dialect", "T"], 2]]
|
reformedacademy
|
51d8cf90c88613f6bd7d878e3f35b7ce612f8d58
|
a0d9be4d5d01d2924c87e6acfddf5c1a3398b3f8
|
reformedacademy/models.py
|
https://github.com/monty5811/reformedacademy
| true
| false
| true
|
@@ -307,7 +307,7 @@ class CourseProgress(models.Model):
def complete(self):
"""Marks this course as complete."""
- self.completed = True
+ self.completed = timezone.now()
self.save()
def __unicode__(self):
|
self . completed = True
|
self . completed = timezone . now ( )
|
SINGLE_STMT
|
[["Insert", ["assignment", 3, 9, 3, 30], ["call", "N0"], 2], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Insert", "N1", ["identifier:timezone", "T"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:now", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["):)", "T"], 1], ["Delete", ["true:True", 3, 26, 3, 30]]]
|
scikit-optimize
|
020f0e15046dc57cbac44cbe729c251ae61c77b6
|
8db972d56bee115a558dd9d67615403d360b031a
|
setup.py
|
https://github.com/Hvass-Labs/scikit-optimize
| true
| false
| false
|
@@ -12,6 +12,6 @@ setup(name='scikit-optimize',
author='The scikit-optimize contributors',
packages=['skopt', 'skopt.learning', 'skopt.optimizer', 'skopt.space',
'skopt.learning.gaussian_process'],
- install_requires=["numpy", "scipy", "scikit-learn>=0.18",
+ install_requires=["numpy", "scipy>=0.14.0", "scikit-learn>=0.18",
"scikit-garden", "matplotlib"]
)
|
install_requires = [ "numpy" , "scipy" , "scikit-learn>=0.18" , "scikit-garden" , "matplotlib" ]
|
install_requires = [ "numpy" , "scipy>=0.14.0" , "scikit-learn>=0.18" , "scikit-garden" , "matplotlib" ]
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:\"scipy\"", 3, 34, 3, 41], "\"scipy>=0.14.0\""]]
|
saleor
|
896cda3cd3e03f5d47e301492904be5de5ea6f26
|
a1bb298880ccf9062414a0de2d3ec5adf284b009
|
saleor/graphql/core/mutations.py
|
https://github.com/alexredondosk8/saleor
| true
| false
| false
|
@@ -44,7 +44,7 @@ class ModelMutationOptions(MutationOptions):
class BaseMutation(graphene.Mutation):
errors = graphene.List(
- Error,
+ graphene.NonNull(Error),
description='List of errors that occurred executing the mutation.')
class Meta:
|
errors = graphene . List ( Error , description = 'List of errors that occurred executing the mutation.' )
|
errors = graphene . List ( graphene . NonNull ( Error ) , description = 'List of errors that occurred executing the mutation.' )
|
ADD_FUNCTION_AROUND_EXPRESSION
|
[["Insert", ["argument_list", 2, 27, 4, 76], ["call", "N0"], 1], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Insert", "N1", ["identifier:graphene", "T"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:NonNull", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Move", "N2", ["identifier:Error", 3, 9, 3, 14], 1], ["Insert", "N2", ["):)", "T"], 2]]
|
pyxnat
|
d010660a1f7b34773a512cdee6f3149bca0ac105
|
9771722e497cc7b0efedc1f5c202e95bb8fa82b9
|
pyxnat/tests/xpath_test.py
|
https://github.com/neurodebian/pyxnat
| true
| false
| true
|
@@ -17,7 +17,7 @@ def test_values():
assert 'OAS1_0002' in central.xpath.values('ID')
def test_element_attrs():
- assert isinstance(central.xpath.element_attrs('fs:region')[0], dict)
+ assert isinstance(central.xpath.element_attrs('fs:region'), list)
assert set(['SegId', 'hemisphere', 'name']).issubset(
central.xpath.element_keys('fs:region'))
|
assert isinstance ( central . xpath . element_attrs ( 'fs:region' ) [ 0 ] , dict )
|
assert isinstance ( central . xpath . element_attrs ( 'fs:region' ) , list )
|
SINGLE_STMT
|
[["Move", ["argument_list", 3, 22, 3, 73], ["call", 3, 23, 3, 63], 1], ["Update", ["identifier:dict", 3, 68, 3, 72], "list"], ["Delete", ["[:[", 3, 63, 3, 64]], ["Delete", ["integer:0", 3, 64, 3, 65]], ["Delete", ["]:]", 3, 65, 3, 66]], ["Delete", ["subscript", 3, 23, 3, 66]]]
|
jrnl
|
60bcac8d555d358e189dadbab4812b00fb9cbbbb
|
b88180fbce77521aa67badc7db329b06d70c10bc
|
docs/conf.py
|
https://github.com/mengjues/jrnl
| true
| false
| false
|
@@ -44,7 +44,7 @@ master_doc = 'index'
# General information about the project.
project = u'jrnl'
-copyright = u'Journal is made with love by <a href="http://www.1450.me">Manuel Ebert</a> and <a href="https://github.com/maebert/jrnl/graphs/contributors" title="Contributtors">other fabulous people</a>. If you need help, tweet to <a href="https://twitter.com/maebert" title="Follow @maebert on twitter">@maebert</a> or <a href="https://github.com/maebert/jrnl/issues/new" title="Open a new issue on Github">submit an issue</a> on Github.'
+copyright = u'jrnl is made with love by <a href="http://www.1450.me">Manuel Ebert</a> and <a href="https://github.com/maebert/jrnl/graphs/contributors" title="Contributors">other fabulous people</a>. If you need help, tweet to <a href="https://twitter.com/maebert" title="Follow @maebert on twitter">@maebert</a> or <a href="https://github.com/maebert/jrnl/issues/new" title="Open a new issue on Github">submit an issue</a> on Github.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
|
copyright = u'Journal is made with love by <a href="http://www.1450.me">Manuel Ebert</a> and <a href="https://github.com/maebert/jrnl/graphs/contributors" title="Contributtors">other fabulous people</a>. If you need help, tweet to <a href="https://twitter.com/maebert" title="Follow @maebert on twitter">@maebert</a> or <a href="https://github.com/maebert/jrnl/issues/new" title="Open a new issue on Github">submit an issue</a> on Github.'
|
copyright = u'jrnl is made with love by <a href="http://www.1450.me">Manuel Ebert</a> and <a href="https://github.com/maebert/jrnl/graphs/contributors" title="Contributors">other fabulous people</a>. If you need help, tweet to <a href="https://twitter.com/maebert" title="Follow @maebert on twitter">@maebert</a> or <a href="https://github.com/maebert/jrnl/issues/new" title="Open a new issue on Github">submit an issue</a> on Github.'
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:u'Journal is made with love by <a href=\"http://www.1450.me\">Manuel Ebert</a> and <a href=\"https://github.com/maebert/jrnl/graphs/contributors\" title=\"Contributtors\">other fabulous people</a>. If you need help, tweet to <a href=\"https://twitter.com/maebert\" title=\"Follow @maebert on twitter\">@maebert</a> or <a href=\"https://github.com/maebert/jrnl/issues/new\" title=\"Open a new issue on Github\">submit an issue</a> on Github.'", 3, 13, 3, 440], "u'jrnl is made with love by <a href=\"http://www.1450.me\">Manuel Ebert</a> and <a href=\"https://github.com/maebert/jrnl/graphs/contributors\" title=\"Contributors\">other fabulous people</a>. If you need help, tweet to <a href=\"https://twitter.com/maebert\" title=\"Follow @maebert on twitter\">@maebert</a> or <a href=\"https://github.com/maebert/jrnl/issues/new\" title=\"Open a new issue on Github\">submit an issue</a> on Github.'"]]
|
salt
|
b72adf27914496b24f4ef98658c1d44955f7266d
|
fed58155bca59be5d689039e8e962bb56b3ac844
|
salt/modules/iptables.py
|
https://github.com/Roche/salt
| true
| false
| false
|
@@ -141,7 +141,7 @@ def build_rule(table=None, chain=None, command=None, position='', full=None, fam
rule = ''
proto = False
- bang_not_pat = re.compile(r'[!,not]\s?')
+ bang_not_pat = re.compile(r'[!|not]\s?')
if 'if' in kwargs:
if kwargs['if'].startswith('!') or kwargs['if'].startswith('not'):
|
bang_not_pat = re . compile ( r'[!,not]\s?' )
|
bang_not_pat = re . compile ( r'[!|not]\s?' )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:r'[!,not]\\s?'", 3, 31, 3, 44], "r'[!|not]\\s?'"]]
|
Theano
|
70f0b2ebc7925c48601825c855d134eaa1b65977
|
9d6615eb31cc52a9d0bf071925f65cba16c70ae1
|
theano/scan_module/scan_utils.py
|
https://github.com/vdumoulin/Theano
| true
| false
| true
|
@@ -156,7 +156,7 @@ def traverse(out, x, x_copy, d, visited=None):
d[out] = cuda.gpu_from_host(x_copy)
else:
assert isinstance(x.type, gpuarray.GpuArrayType)
- d[out] = gpuarray.gpu_from_host(x_copy)
+ d[out] = gpuarray.GpuFromHost(x.type.context_name)(x_copy)
return d
elif out.owner is None:
return d
|
d [ out ] = gpuarray . gpu_from_host ( x_copy )
|
d [ out ] = gpuarray . GpuFromHost ( x . type . context_name ) ( x_copy )
|
SINGLE_STMT
|
[["Insert", ["call", 3, 22, 3, 52], ["call", "N0"], 0], ["Move", "N0", ["attribute", 3, 22, 3, 44], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Update", ["identifier:gpu_from_host", 3, 31, 3, 44], "GpuFromHost"], ["Insert", "N1", ["(:(", "T"], 0], ["Insert", "N1", ["attribute", "N2"], 1], ["Insert", "N1", ["):)", "T"], 2], ["Insert", "N2", ["attribute", "N3"], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:context_name", "T"], 2], ["Insert", "N3", ["identifier:x", "T"], 0], ["Insert", "N3", [".:.", "T"], 1], ["Insert", "N3", ["identifier:type", "T"], 2]]
|
bupt-messager
|
a611b068c3a1ef6e8e36de7be26c86c2730464f3
|
d9cb9a8823e0173dc6d6d0a8edd94e3bf5aa8d79
|
bupt_messager/bot_handler/bot_backend.py
|
https://github.com/Berailitz/bupt-messager
| true
| false
| true
|
@@ -38,7 +38,7 @@ class BotBackend(object):
args = self.backend_helper.prase_callback(update)
length = try_int(args[0]) if args else 1
start = try_int(args[1]) if args[1:] else 0
- self.backend_helper.send_latest_notice(bot=bot, message=update.message, length=length, start=start)
+ self.backend_helper.send_latest_notice(bot=bot, message=update.callback_query.message, length=length, start=start)
@staticmethod
def yo_command(bot, update):
|
self . backend_helper . send_latest_notice ( bot = bot , message = update . message , length = length , start = start )
|
self . backend_helper . send_latest_notice ( bot = bot , message = update . callback_query . message , length = length , start = start )
|
ADD_ATTRIBUTE_ACCESS
|
[["Insert", ["attribute", 3, 65, 3, 79], ["attribute", "N0"], 0], ["Insert", ["attribute", 3, 65, 3, 79], [".:.", "T"], 1], ["Move", "N0", ["identifier:update", 3, 65, 3, 71], 0], ["Move", "N0", [".:.", 3, 71, 3, 72], 1], ["Insert", "N0", ["identifier:callback_query", "T"], 2]]
|
machine-learning
|
1fea68d2bdfd03084b7d145677bd38e6ded611b4
|
049f6ad2b9a31de2aa09c270e8c727b3ec7b5e52
|
learnPreprocessed.py
|
https://github.com/clu-star/machine-learning
| true
| false
| true
|
@@ -145,7 +145,7 @@ def train(splits,numsplits):
for j in range(0,len(splits[i])): # for each img in split
# detect features with SIFT
descriptors = splits[i][j].cvdata
- keypoints = [splits[i][j].auxdata[0],splits[i][j].auxdata[1]]
+ keyPoints = [splits[i][j].auxdata[0],splits[i][j].auxdata[1]]
keyClasses = []
# classify the features based on if they are located
# where we expect nodules to be
|
keypoints = [ splits [ i ] [ j ] . auxdata [ 0 ] , splits [ i ] [ j ] . auxdata [ 1 ] ]
|
keyPoints = [ splits [ i ] [ j ] . auxdata [ 0 ] , splits [ i ] [ j ] . auxdata [ 1 ] ]
|
CHANGE_IDENTIFIER_USED
|
[["Update", ["identifier:keypoints", 3, 6, 3, 15], "keyPoints"]]
|
horizon
|
42924c1fa46c2d3aafaba07e98883d48fb4566df
|
e8bd8f76c16dd88d85439464ec46270fc237df77
|
openstack_dashboard/dashboards/idm/home_orgs/urls.py
|
https://github.com/NozomiNetworks/horizon
| true
| false
| false
|
@@ -19,6 +19,6 @@ from openstack_dashboard.dashboards.idm.home_orgs import views
urlpatterns = patterns('',
url(r'^$', views.IndexView.as_view(), name='index'),
- url(r'^/edit/members/$',
+ url(r'^edit/members/$',
views.OrganizationMembersView.as_view(), name='members'),
)
|
urlpatterns = patterns ( '' , url ( r'^$' , views . IndexView . as_view ( ) , name = 'index' ) , url ( r'^/edit/members/$' , views . OrganizationMembersView . as_view ( ) , name = 'members' ) , )
|
urlpatterns = patterns ( '' , url ( r'^$' , views . IndexView . as_view ( ) , name = 'index' ) , url ( r'^edit/members/$' , views . OrganizationMembersView . as_view ( ) , name = 'members' ) , )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:r'^/edit/members/$'", 3, 9, 3, 28], "r'^edit/members/$'"]]
|
salt
|
bfa756cc9ea2b728b1356a8e0d08733a1424d7cb
|
4b0c342f7186de16edef62f39d51b401983beefe
|
salt/states/pip_state.py
|
https://github.com/fake-name/salt
| true
| false
| false
|
@@ -433,7 +433,7 @@ def installed(name,
if requirements or editable:
comments = []
if requirements:
- for eachline in pip_install_call.get('stdout').split('\n'):
+ for eachline in pip_install_call.get('stdout', '').split('\n'):
if not eachline.startswith('Requirement already satisfied') and eachline != 'Cleaning up...':
ret['changes']['requirements'] = True
if ret['changes'].get('requirements'):
|
for eachline in pip_install_call . get ( 'stdout' ) . split ( '\n' ) : if not eachline . startswith ( 'Requirement already satisfied' ) and eachline != 'Cleaning up...' : ret [ 'changes' ] [ 'requirements' ] = True
|
for eachline in pip_install_call . get ( 'stdout' , '' ) . split ( '\n' ) : if not eachline . startswith ( 'Requirement already satisfied' ) and eachline != 'Cleaning up...' : ret [ 'changes' ] [ 'requirements' ] = True
|
SAME_FUNCTION_MORE_ARGS
|
[["Insert", ["argument_list", 3, 53, 3, 63], [",:,", "T"], 2], ["Insert", ["argument_list", 3, 53, 3, 63], ["string:''", "T"], 3]]
|
skutil
|
b4b0baec769fe75a5731584bdaa218fd4d5e7062
|
87becd188b6dd7a43bd8531f9f3779734d3d7e69
|
skutil/h2o/tests/test_selectors.py
|
https://github.com/tgsmith61591/skutil
| true
| false
| true
|
@@ -26,7 +26,7 @@ def catch_warning_assert_thrown(fun, kwargs):
warnings.simplefilter("always")
ret = fun(**kwargs)
- assert len(w) > 0 if not X else True, 'expected warning to be thrown'
+ assert len(w) > 0 if X is None else True, 'expected warning to be thrown'
return ret
|
assert len ( w ) > 0 if not X else True , 'expected warning to be thrown'
|
assert len ( w ) > 0 if X is None else True , 'expected warning to be thrown'
|
SINGLE_STMT
|
[["Insert", ["conditional_expression", 3, 10, 3, 39], ["comparison_operator", "N0"], 2], ["Move", "N0", ["identifier:X", 3, 28, 3, 29], 0], ["Insert", "N0", ["is:is", "T"], 1], ["Insert", "N0", ["none:None", "T"], 2], ["Delete", ["not:not", 3, 24, 3, 27]], ["Delete", ["not_operator", 3, 24, 3, 29]]]
|
cpython
|
48e82e5f2939f17ca7511e78273e9a0043a2d9c4
|
f008e266c89917a5e838cfdc29381e2eb3fe5328
|
Lib/test/test_tcl.py
|
https://github.com/laiy/cpython
| true
| false
| true
|
@@ -163,7 +163,7 @@ class TclTest(unittest.TestCase):
self.assertEqual(passValue(u'string\u20ac'), u'string\u20ac')
for i in (0, 1, -1, int(2**31-1), int(-2**31)):
self.assertEqual(passValue(i), i)
- for f in (0.0, 1.0, -1.0, 1/3,
+ for f in (0.0, 1.0, -1.0, 1//3, 1/3.0,
sys.float_info.min, sys.float_info.max,
-sys.float_info.min, -sys.float_info.max):
self.assertEqual(passValue(f), f)
|
for f in ( 0.0 , 1.0 , - 1.0 , 1 / 3 , sys . float_info . min , sys . float_info . max , - sys . float_info . min , - sys . float_info . max ) : self . assertEqual ( passValue ( f ) , f )
|
for f in ( 0.0 , 1.0 , - 1.0 , 1 // 3 , 1 / 3.0 , sys . float_info . min , sys . float_info . max , - sys . float_info . min , - sys . float_info . max ) : self . assertEqual ( passValue ( f ) , f )
|
SINGLE_STMT
|
[["Insert", ["tuple", 3, 18, 5, 60], ["binary_operator", "N0"], 9], ["Insert", ["tuple", 3, 18, 5, 60], [",:,", "T"], 10], ["Insert", ["binary_operator", 3, 35, 3, 38], ["//://", "T"], 1], ["Insert", "N0", ["integer:1", "T"], 0], ["Insert", "N0", ["/:/", "T"], 1], ["Insert", "N0", ["float:3.0", "T"], 2], ["Delete", ["/:/", 3, 36, 3, 37]]]
|
WebRtcClone
|
f94e3f1dc3947299cb001093a17ba87e81f68fe8
|
80a9ef1e126c0f65388a771877b799a3cd6933c1
|
scm.py
|
https://github.com/yangpenglin/WebRtcClone
| true
| false
| true
|
@@ -226,7 +226,7 @@ class GIT(object):
# pipe at a time.
# The -100 is an arbitrary limit so we don't search forever.
cmd = ['git', 'log', '-100', '--pretty=medium']
- proc = subprocess2.Popen(cmd, cwd, stdout=subprocess2.PIPE)
+ proc = subprocess2.Popen(cmd, cwd=cwd, stdout=subprocess2.PIPE)
url = None
for line in proc.stdout:
match = git_svn_re.match(line)
|
proc = subprocess2 . Popen ( cmd , cwd , stdout = subprocess2 . PIPE )
|
proc = subprocess2 . Popen ( cmd , cwd = cwd , stdout = subprocess2 . PIPE )
|
SINGLE_STMT
|
[["Insert", ["argument_list", 3, 29, 3, 64], ["keyword_argument", "N0"], 3], ["Move", "N0", ["identifier:cwd", 3, 35, 3, 38], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["identifier:cwd", "T"], 2]]
|
SQLMAP
|
9a4f8d5f45a7a34516e9f5d62d4ae648f0905cef
|
0702dd70b59437449bb5265e5b64adcdaaeb0949
|
lib/techniques/union/test.py
|
https://github.com/AnnonimityX/SQLMAP
| true
| false
| true
|
@@ -262,7 +262,7 @@ def __unionTestByCharBruteforce(comment, place, parameter, value, prefix, suffix
warnMsg += "usage of option '--union-char' "
warnMsg += "(e.g. --union-char=1) "
else:
- kb.uChar = str(randomInt(2))
+ conf.uChar = kb.uChar = str(randomInt(2))
validPayload, vector = __unionConfirm(comment, place, parameter, prefix, suffix, count)
if not conf.dbms:
|
else : kb . uChar = str ( randomInt ( 2 ) )
|
else : conf . uChar = kb . uChar = str ( randomInt ( 2 ) )
|
SINGLE_STMT
|
[["Insert", ["assignment", 2, 17, 3, 49], ["assignment", "N0"], 4], ["Insert", ["type", 3, 21, 3, 29], ["attribute", "N1"], 0], ["Move", "N0", ["attribute", 3, 21, 3, 29], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Move", "N0", ["call", 3, 32, 3, 49], 2], ["Insert", "N1", ["identifier:conf", "T"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:uChar", "T"], 2]]
|
dyschord
|
cebb031a7739ccb90d0dc61a28391a6b463f9a00
|
1e5d64dab2bb077f3df871ac9d69600e682238ff
|
dyschord/node.py
|
https://github.com/afoglia/dyschord
| true
| false
| true
|
@@ -451,7 +451,7 @@ class Node(MutableMapping) :
self.logger.debug("Setting up node with initial fingers: %s",
[(finger.id, getattr(finger, "url", None))
for finger in fingers.values()])
- self.fingers = fingers.values()
+ self.fingers = [fingers[step] for step in self.finger_steps]
with self.data_lock.wrlocked() :
self.logger.debug("Setting up node with data: %s", data)
self.data.update(data)
|
self . fingers = fingers . values ( )
|
self . fingers = [ fingers [ step ] for step in self . finger_steps ]
|
SINGLE_STMT
|
[["Insert", ["assignment", 3, 7, 3, 38], ["list_comprehension", "N0"], 2], ["Insert", "N0", ["[:[", "T"], 0], ["Insert", "N0", ["subscript", "N1"], 1], ["Insert", "N0", ["for_in_clause", "N2"], 2], ["Insert", "N0", ["]:]", "T"], 3], ["Move", "N1", ["identifier:fingers", 3, 22, 3, 29], 0], ["Insert", "N1", ["[:[", "T"], 1], ["Update", ["identifier:values", 3, 30, 3, 36], "step"], ["Move", "N1", ["identifier:values", 3, 30, 3, 36], 2], ["Insert", "N1", ["]:]", "T"], 3], ["Insert", "N2", ["for:for", "T"], 0], ["Insert", "N2", ["identifier:step", "T"], 1], ["Insert", "N2", ["in:in", "T"], 2], ["Insert", "N2", ["attribute", "N3"], 3], ["Insert", "N3", ["identifier:self", "T"], 0], ["Insert", "N3", [".:.", "T"], 1], ["Insert", "N3", ["identifier:finger_steps", "T"], 2], ["Delete", [".:.", 3, 29, 3, 30]], ["Delete", ["attribute", 3, 22, 3, 36]], ["Delete", ["(:(", 3, 36, 3, 37]], ["Delete", ["):)", 3, 37, 3, 38]], ["Delete", ["argument_list", 3, 36, 3, 38]], ["Delete", ["call", 3, 22, 3, 38]]]
|
Veil-Evasion
|
069789d777623d8d7c437a515f8162449b064946
|
b3ce1612676ab028ed7ef174765a5a7c76a5422f
|
modules/common/controller.py
|
https://github.com/ninions/Veil-Evasion
| true
| false
| true
|
@@ -378,7 +378,7 @@ class Controller:
# if we get .exe code back, output to the compiled folder, otherwise write to the source folder
- if payload.extension == "exe":
+ if payload.extension == "exe" or payload.extension == "war":
outputFolder = settings.PAYLOAD_COMPILED_PATH
else:
outputFolder = settings.PAYLOAD_SOURCE_PATH
|
if payload . extension == "exe" : outputFolder = settings . PAYLOAD_COMPILED_PATH else : outputFolder = settings . PAYLOAD_SOURCE_PATH
|
if payload . extension == "exe" or payload . extension == "war" : outputFolder = settings . PAYLOAD_COMPILED_PATH else : outputFolder = settings . PAYLOAD_SOURCE_PATH
|
LESS_SPECIFIC_IF
|
[["Insert", ["if_statement", 2, 9, 5, 56], ["boolean_operator", "N0"], 1], ["Move", "N0", ["comparison_operator", 2, 12, 2, 38], 0], ["Insert", "N0", ["or:or", "T"], 1], ["Insert", "N0", ["comparison_operator", "N1"], 2], ["Insert", "N1", ["attribute", "N2"], 0], ["Insert", "N1", ["==:==", "T"], 1], ["Insert", "N1", ["string:\"war\"", "T"], 2], ["Insert", "N2", ["identifier:payload", "T"], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:extension", "T"], 2]]
|
ganeti
|
7b80424fbcbf46b2753ee619814ba6fb554ae6ee
|
c4ed32cb662cf0437124568ccb7596846542d661
|
test/ganeti.hooks_unittest.py
|
https://github.com/tvbeat/ganeti
| true
| false
| true
|
@@ -182,7 +182,7 @@ class TestHooksRunner(unittest.TestCase):
os.symlink("/usr/bin/env", fname)
self.torm.append((fname, False))
env_snt = {"PHASE": phase}
- env_exp = "PHASE=%s\n" % phase
+ env_exp = "PHASE=%s" % phase
self.failUnlessEqual(self.hr.RunHooks(self.hpath, phase, env_snt),
[(self._rname(fname), HKR_SUCCESS, env_exp)])
|
env_exp = "PHASE=%s\n" % phase
|
env_exp = "PHASE=%s" % phase
|
CHANGE_BINARY_OPERAND
|
[["Update", ["string:\"PHASE=%s\\n\"", 3, 17, 3, 29], "\"PHASE=%s\""]]
|
munki
|
d895a4c106d08d50fd3ac775349fac26b3867f78
|
bd83d5381c488ae4faf261a66af9c8dc685e8d37
|
code/client/munkilib/updatecheck.py
|
https://github.com/zentralopensource/munki
| true
| false
| true
|
@@ -1037,7 +1037,7 @@ def lookForUpdates(manifestitem, cataloglist, installinfo):
update_items = [catalogitem['name']
for catalogitem in updaters
if (name in catalogitem.get('update_for',[]) or
- nameAndVersion in
+ nameWithVersion in
catalogitem.get('update_for',[]))]
if update_items:
update_list.extend(update_items)
|
update_items = [ catalogitem [ 'name' ] for catalogitem in updaters if ( name in catalogitem . get ( 'update_for' , [ ] ) or nameAndVersion in catalogitem . get ( 'update_for' , [ ] ) ) ]
|
update_items = [ catalogitem [ 'name' ] for catalogitem in updaters if ( name in catalogitem . get ( 'update_for' , [ ] ) or nameWithVersion in catalogitem . get ( 'update_for' , [ ] ) ) ]
|
CHANGE_IDENTIFIER_USED
|
[["Update", ["identifier:nameAndVersion", 3, 33, 3, 47], "nameWithVersion"]]
|
readthedocs.org
|
5591eb5bd7111597f7431b74819f873c1bef7158
|
a1f70a041cba3e7d2990a09f0e91c41837f7c672
|
readthedocs/urls.py
|
https://github.com/quintshekhar/readthedocs.org
| true
| false
| false
|
@@ -86,7 +86,7 @@ groups = [basic_urls, rtd_urls, project_urls, api_urls, core_urls, i18n_urls,
if 'readthedocsext.donate' in settings.INSTALLED_APPS:
# Include donation URL's
groups.append([
- url(r'^sustainability/', include('readthedocs.donate.urls')),
+ url(r'^sustainability/', include('readthedocsext.donate.urls')),
url(r'^accounts/gold/', include('readthedocs.gold.urls')),
])
if not getattr(settings, 'USE_SUBDOMAIN', False) or settings.DEBUG:
|
groups . append ( [ url ( r'^sustainability/' , include ( 'readthedocs.donate.urls' ) ) , url ( r'^accounts/gold/' , include ( 'readthedocs.gold.urls' ) ) , ] )
|
groups . append ( [ url ( r'^sustainability/' , include ( 'readthedocsext.donate.urls' ) ) , url ( r'^accounts/gold/' , include ( 'readthedocs.gold.urls' ) ) , ] )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'readthedocs.donate.urls'", 3, 42, 3, 67], "'readthedocsext.donate.urls'"]]
|
lowerpines
|
0d7d0d1229d68bbe9366f1b9afd249b019034b54
|
59881371f36c896f50e965a40aabc754b99e05b6
|
lowerpines/endpoints/message.py
|
https://github.com/Bigfootjon/lowerpines
| true
| false
| false
|
@@ -15,7 +15,7 @@ class Message(AbstractObject, RetrievableObject):
text = Field()
system = Field()
favorited_by = Field()
- attachments = Field()
+ attachments = Field(handler=dict)
sender_type = Field()
sender_id = Field()
|
attachments = Field ( )
|
attachments = Field ( handler = dict )
|
SAME_FUNCTION_MORE_ARGS
|
[["Insert", ["argument_list", 3, 24, 3, 26], ["keyword_argument", "N0"], 1], ["Insert", "N0", ["identifier:handler", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["identifier:dict", "T"], 2]]
|
faraday
|
414a0f302b63475fc8cbe26be521e6a0e2e20744
|
040478ed94f9a26ca7fe1cffb8acaa01570aaacd
|
faraday/server/api/modules/export_data.py
|
https://github.com/ru-faraon/faraday
| true
| false
| true
|
@@ -196,7 +196,7 @@ def _build_vuln_web_element(vuln, vuln_tag):
def map_severity(severity):
risk = '1'
- if severity == 'critical' or severity == 'high':
+ if severity in ['high', 'critical']:
risk = '5'
elif severity == 'medium':
risk = '4'
|
if severity == 'critical' or severity == 'high' : risk = '5' elif severity == 'medium' : risk = '4'
|
if severity in [ 'high' , 'critical' ] : risk = '5' elif severity == 'medium' : risk = '4'
|
SINGLE_STMT
|
[["Insert", ["if_statement", 3, 5, 6, 19], ["comparison_operator", "N0"], 1], ["Move", "N0", ["identifier:severity", 3, 8, 3, 16], 0], ["Insert", "N0", ["in:in", "T"], 1], ["Insert", "N0", ["list", "N1"], 2], ["Insert", "N1", ["[:[", "T"], 0], ["Insert", "N1", ["string:'high'", "T"], 1], ["Insert", "N1", [",:,", "T"], 2], ["Insert", "N1", ["string:'critical'", "T"], 3], ["Insert", "N1", ["]:]", "T"], 4], ["Delete", ["==:==", 3, 17, 3, 19]], ["Delete", ["string:'critical'", 3, 20, 3, 30]], ["Delete", ["comparison_operator", 3, 8, 3, 30]], ["Delete", ["or:or", 3, 31, 3, 33]], ["Delete", ["identifier:severity", 3, 34, 3, 42]], ["Delete", ["==:==", 3, 43, 3, 45]], ["Delete", ["string:'high'", 3, 46, 3, 52]], ["Delete", ["comparison_operator", 3, 34, 3, 52]], ["Delete", ["boolean_operator", 3, 8, 3, 52]]]
|
DIRAC
|
6b0d3a952d2f8dd5e3a6932be9097d80515007fa
|
169901b53e65e1f5290fbab3c0565d023e45fdf7
|
Resources/Computing/SSHComputingElement.py
|
https://github.com/arrabito/DIRAC
| true
| false
| true
|
@@ -374,7 +374,7 @@ class SSHComputingElement( ComputingElement ):
return S_ERROR( 'Failed job submission, reason: %s' % message )
else:
batchIDs = outputLines[1:]
- jobIDs = [ self.ceType.lower()+'://'+submitHost+'/'+id for id in batchIDs ]
+ jobIDs = [ self.ceType.lower()+'://'+self.ceName+'/'+id for id in batchIDs ]
else:
return S_ERROR( '\n'.join( [sshStdout,sshStderr] ) )
|
jobIDs = [ self . ceType . lower ( ) + '://' + submitHost + '/' + id for id in batchIDs ]
|
jobIDs = [ self . ceType . lower ( ) + '://' + self . ceName + '/' + id for id in batchIDs ]
|
CHANGE_BINARY_OPERAND
|
[["Insert", ["binary_operator", 3, 20, 3, 56], ["attribute", "N0"], 2], ["Update", ["identifier:submitHost", 3, 46, 3, 56], "self"], ["Move", "N0", ["identifier:submitHost", 3, 46, 3, 56], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:ceName", "T"], 2]]
|
starbox-erpnext
|
4d68e03a97c84e82bc4d6dbd2cb2d09b4c8cb93a
|
eaec4695f780eeb831c542c28b2430553a56b2d4
|
erpnext/hr/doctype/job_opening/job_opening.py
|
https://github.com/vhrspvl/starbox-erpnext
| true
| false
| true
|
@@ -21,7 +21,7 @@ class JobOpening(WebsiteGenerator):
self.route = frappe.scrub(self.job_title).replace('_', '-')
def get_context(self, context):
- context.parents = [{'name': 'jobs', 'title': _('All Jobs') }]
+ context.parents = [{'route': 'jobs', 'title': _('All Jobs') }]
def get_list_context(context):
context.title = _("Jobs")
|
context . parents = [ { 'name' : 'jobs' , 'title' : _ ( 'All Jobs' ) } ]
|
context . parents = [ { 'route' : 'jobs' , 'title' : _ ( 'All Jobs' ) } ]
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'name'", 3, 23, 3, 29], "'route'"]]
|
cpython
|
665757847eddd2e4b211bd78d964171ba541731c
|
c4fe092bc34d0bed120ecf369684ece4afec6be0
|
Tools/clinic/clinic.py
|
https://github.com/CTSRD-CHERI/cpython
| true
| false
| true
|
@@ -3448,7 +3448,7 @@ class DSLParser:
a.append('=')
value = p.converter.py_default
if not value:
- value = str(p.converter.default)
+ value = repr(p.converter.default)
a.append(value)
s = fix_right_bracket_count(p.right_bracket_count)
s += "".join(a)
|
value = str ( p . converter . default )
|
value = repr ( p . converter . default )
|
WRONG_FUNCTION_NAME
|
[["Update", ["identifier:str", 3, 29, 3, 32], "repr"]]
|
chainer
|
69fc73eca1bf38f3e10e23cefecbb7023724d047
|
3be74f51bbf98a287d3efd2a7932201e27824f94
|
chainer/functions/math/basic_math.py
|
https://github.com/chainer/chainer
| true
| false
| true
|
@@ -193,7 +193,7 @@ class MultiAdd(function_node.FunctionNode):
return xs
y = None
if intel64.should_use_ideep('>=auto'):
- bxs = numpy.broadcast_arrays(*xs)
+ bxs = numpy.broadcast_arrays(xs)
if intel64.inputs_all_ready(bxs):
y = intel64.ideep.multi_add(bxs)
if y is None:
|
bxs = numpy . broadcast_arrays ( * xs )
|
bxs = numpy . broadcast_arrays ( xs )
|
SINGLE_STMT
|
[["Move", ["argument_list", 3, 41, 3, 46], ["identifier:xs", 3, 43, 3, 45], 1], ["Delete", ["*:*", 3, 42, 3, 43]], ["Delete", ["list_splat", 3, 42, 3, 45]]]
|
openNAMU
|
37e2db3846c68678ef4acf926d1e8653027b148d
|
454750a1fe55f1635f73b2a5f2c739de0bbd2deb
|
app.py
|
https://github.com/2du/openNAMU
| true
| false
| true
|
@@ -492,7 +492,7 @@ def image_view(name = None):
def acl_list():
div = '<ul>'
- curs.execute("select title, acl from data where acl = 'admin' or acl = 'user' order by acl desc")
+ curs.execute("select title, dec from acl where dec = 'admin' or dec = 'user' order by title desc")
list_data = curs.fetchall()
for data in list_data:
if(not re.search('^사용자:', data[0]) and not re.search('^파일:', data[0])):
|
curs . execute ( "select title, acl from data where acl = 'admin' or acl = 'user' order by acl desc" )
|
curs . execute ( "select title, dec from acl where dec = 'admin' or dec = 'user' order by title desc" )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:\"select title, acl from data where acl = 'admin' or acl = 'user' order by acl desc\"", 3, 18, 3, 101], "\"select title, dec from acl where dec = 'admin' or dec = 'user' order by title desc\""]]
|
voc
|
be29e195bbdfd9acad4841d0e241554e735baf3a
|
d6e9714c27615371b71d1ceee00ddbb383ba3404
|
voc/__init__.py
|
https://github.com/shyamsunder007/voc
| true
| false
| false
|
@@ -6,4 +6,4 @@
# __version__ = '1.2.3' # Final Release
# __version__ = '1.2.3.post1' # Post Release 1
-__version__ = '0.1.2'
+__version__ = '0.1.2-dev'
|
__version__ = '0.1.2'
|
__version__ = '0.1.2-dev'
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'0.1.2'", 3, 15, 3, 22], "'0.1.2-dev'"]]
|
proto
|
bae1574fff1d20a9df057ccc59a783180756ce5a
|
a4592d7c474c9cb64374fdaee901a261347a65b7
|
lib/galaxy/tools/parameters/basic.py
|
https://github.com/unioslo/proto
| true
| false
| true
|
@@ -1182,7 +1182,7 @@ class DataToolParameter( ToolParameter ):
return []
def converter_safe( self, other_values, trans ):
- if not hasattr( trans, 'has_multiple_pages' ) or self.tool.has_multiple_pages or not hasattr( trans, 'workflow_building_mode' ) or trans.workflow_building_mode:
+ if self.tool is None or self.tool.has_multiple_pages or not hasattr( trans, 'workflow_building_mode' ) or trans.workflow_building_mode:
return False
if other_values is None:
return True # we don't know other values, so we can't check, assume ok
|
if not hasattr ( trans , 'has_multiple_pages' ) or self . tool . has_multiple_pages or not hasattr ( trans , 'workflow_building_mode' ) or trans . workflow_building_mode : return False
|
if self . tool is None or self . tool . has_multiple_pages or not hasattr ( trans , 'workflow_building_mode' ) or trans . workflow_building_mode : return False
|
SINGLE_STMT
|
[["Move", ["if_statement", 3, 9, 4, 25], ["boolean_operator", 3, 16, 3, 168], 1], ["Insert", ["boolean_operator", 3, 16, 3, 86], ["comparison_operator", "N0"], 0], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["is:is", "T"], 1], ["Insert", "N0", ["none:None", "T"], 2], ["Insert", "N1", ["identifier:self", "T"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:tool", "T"], 2], ["Delete", ["not:not", 3, 12, 3, 15]], ["Delete", ["identifier:hasattr", 3, 16, 3, 23]], ["Delete", ["(:(", 3, 23, 3, 24]], ["Delete", ["identifier:trans", 3, 25, 3, 30]], ["Delete", [",:,", 3, 30, 3, 31]], ["Delete", ["string:'has_multiple_pages'", 3, 32, 3, 52]], ["Delete", ["):)", 3, 53, 3, 54]], ["Delete", ["argument_list", 3, 23, 3, 54]], ["Delete", ["call", 3, 16, 3, 54]], ["Delete", ["not_operator", 3, 12, 3, 168]]]
|
PokemonGo-Bot
|
c331b8cb046cfd0c3e86d07536e5b56e9d755c67
|
b996ab98214cc3edd0217cf76727f8d576a9262d
|
pokecli.py
|
https://github.com/kije/PokemonGo-Bot
| true
| false
| true
|
@@ -66,7 +66,7 @@ def init_config():
parser.add_argument("-c", "--cp",help="Set CP less than to transfer(DEFAULT 100)",type=int,default=100)
parser.add_argument("-k", "--gmapkey",help="Set Google Maps API KEY",type=str,default=None)
parser.add_argument("--maxsteps",help="Set the steps around your initial location(DEFAULT 5 mean 25 cells around your location)",type=int,default=5)
- parser.add_argument("--initial-transfer", help="Transfer all pokemon with same ID on bot start, except pokemon with highest CP. It works with -c", action='store_true', location='initial_transfer')
+ parser.add_argument("--initial-transfer", help="Transfer all pokemon with same ID on bot start, except pokemon with highest CP. It works with -c", action='store_true', dest='initial_transfer')
parser.add_argument("-d", "--debug", help="Debug Mode", action='store_true')
parser.add_argument("-t", "--test", help="Only parse the specified location", action='store_true')
parser.set_defaults(DEBUG=False, TEST=False)
|
parser . add_argument ( "--initial-transfer" , help = "Transfer all pokemon with same ID on bot start, except pokemon with highest CP. It works with -c" , action = 'store_true' , location = 'initial_transfer' )
|
parser . add_argument ( "--initial-transfer" , help = "Transfer all pokemon with same ID on bot start, except pokemon with highest CP. It works with -c" , action = 'store_true' , dest = 'initial_transfer' )
|
CHANGE_KEYWORD_ARGUMENT_USED
|
[["Update", ["identifier:location", 3, 173, 3, 181], "dest"]]
|
eni-frappe
|
ff93ab9fe7e62a5b6fe79901c1864ecb94112ebc
|
9e6eaf5a0bf9606adecae7b43a3131eb793c2552
|
frappe/model/document.py
|
https://github.com/indictranstech/eni-frappe
| true
| false
| true
|
@@ -298,7 +298,7 @@ class Document(BaseDocument):
def validate_higher_perm_levels(self):
"""If the user does not have permissions at permlevel > 0, then reset the values to original / default"""
- if self.flags.ignore_permissions:
+ if self.flags.ignore_permissions or frappe.flags.in_install:
return
self.get_high_permlevel_fields()
|
if self . flags . ignore_permissions : return
|
if self . flags . ignore_permissions or frappe . flags . in_install : return
|
LESS_SPECIFIC_IF
|
[["Insert", ["if_statement", 3, 3, 4, 10], ["boolean_operator", "N0"], 1], ["Move", "N0", ["attribute", 3, 6, 3, 35], 0], ["Insert", "N0", ["or:or", "T"], 1], ["Insert", "N0", ["attribute", "N1"], 2], ["Insert", "N1", ["attribute", "N2"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:in_install", "T"], 2], ["Insert", "N2", ["identifier:frappe", "T"], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:flags", "T"], 2]]
|
nipype
|
27daccc7ff425949aac6056764cd31aad91be98d
|
a3a62343d97adfa11a1d48ddad461d126e6127aa
|
nipype/interfaces/fsl/epi.py
|
https://github.com/kaczmarj/nipype
| true
| false
| true
|
@@ -293,7 +293,7 @@ class TOPUP(FSLCommand):
outputs['out_jacs'] = [
fmt(prefix=self.inputs.out_jac_prefix, i=i, ext=ext)
for i in range(1, n_vols + 1)]
- output['out_mats'] = [
+ outputs['out_mats'] = [
fmt(prefix=self.inputs.out_mat_prefix, i=i, ext=".mat")
for i in range(1, n_vols + 1)]
|
output [ 'out_mats' ] = [ fmt ( prefix = self . inputs . out_mat_prefix , i = i , ext = ".mat" ) for i in range ( 1 , n_vols + 1 ) ]
|
outputs [ 'out_mats' ] = [ fmt ( prefix = self . inputs . out_mat_prefix , i = i , ext = ".mat" ) for i in range ( 1 , n_vols + 1 ) ]
|
CHANGE_IDENTIFIER_USED
|
[["Update", ["identifier:output", 3, 9, 3, 15], "outputs"]]
|
psychopy
|
fe756fa684ed351ed2a0eeb32dcb676616edbb7d
|
ad9b06a37bb7be016fb88aeaf17feddd25e70b07
|
psychopy/app/utils.py
|
https://github.com/jfkominsky/psychopy
| true
| false
| true
|
@@ -216,7 +216,7 @@ class PsychopyToolbar(wx.ToolBar, ThemeMixin):
elif frame.__class__.__name__ == 'CoderFrame':
self.addPsychopyTool('filenew', 'New', 'new',
"Create new experiment file",
- self.frame.app.newBuilderFrame) # New
+ self.frame.fileNew) # New
self.addPsychopyTool('fileopen', 'Open', 'open',
"Open an existing experiment file",
self.frame.fileOpen) # Open
|
frame . __class__ . __name__ == 'CoderFrame' : self . addPsychopyTool ( 'filenew' , 'New' , 'new' , "Create new experiment file" , self . frame . app . newBuilderFrame )
|
frame . __class__ . __name__ == 'CoderFrame' : self . addPsychopyTool ( 'filenew' , 'New' , 'new' , "Create new experiment file" , self . frame . fileNew )
|
SINGLE_STMT
|
[["Move", ["argument_list", 1, 33, 3, 65], ["attribute", 3, 34, 3, 48], 9], ["Update", ["identifier:app", 3, 45, 3, 48], "fileNew"], ["Delete", [".:.", 3, 48, 3, 49]], ["Delete", ["identifier:newBuilderFrame", 3, 49, 3, 64]], ["Delete", ["attribute", 3, 34, 3, 64]]]
|
qiime
|
525e8557d20ba4f47656f2415aebf725bbb08188
|
28c0020e05e4d29d9446eb3837e500e6328386bb
|
qiime/assign_taxonomy.py
|
https://github.com/gditzler/qiime
| true
| false
| true
|
@@ -270,7 +270,7 @@ class BlastTaxonAssigner(TaxonAssigner):
""" blast each seq in seqs against blast_db and retain good hits
"""
max_evalue = self.Params['Max E value']
- min_percent_identity = self.Params['Min percent identity']
+ min_percent_identity = 100.0 * self.Params['Min percent identity']
seq_ids = [s[0] for s in seqs]
result = {}
|
min_percent_identity = self . Params [ 'Min percent identity' ]
|
min_percent_identity = 100.0 * self . Params [ 'Min percent identity' ]
|
SINGLE_STMT
|
[["Insert", ["assignment", 3, 9, 3, 67], ["binary_operator", "N0"], 2], ["Insert", "N0", ["float:100.0", "T"], 0], ["Insert", "N0", ["*:*", "T"], 1], ["Move", "N0", ["subscript", 3, 32, 3, 67], 2]]
|
Celestier
|
a7c1ed015224d16dfa3460182aa86a13d4a579d3
|
1bac28a9ed7cb9ad11b9c5d983d04d5f4b00df91
|
tests/dashboard/test_site_settings.py
|
https://github.com/fgkinus/Celestier
| true
| false
| true
|
@@ -15,7 +15,7 @@ def test_index_view(admin_client, site_settings):
assert response.status_code == 200
context = response.context
- assert context['site'] == site_settings
+ assert context['site_settings'] == site_settings
def test_site_form():
|
assert context [ 'site' ] == site_settings
|
assert context [ 'site_settings' ] == site_settings
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'site'", 3, 20, 3, 26], "'site_settings'"]]
|
py3status
|
a5ec28641e7b3eee7f4ea5a39fb9e3a30c086c79
|
9294db752fc5b448200475953b51ee145d2b3e15
|
py3status/__init__.py
|
https://github.com/Tethik/py3status
| true
| false
| true
|
@@ -133,7 +133,7 @@ def i3status_config_reader(config_file):
line = line.strip(' \t\n\r')
if line.startswith('general'):
in_general = True
- elif line.startswith('time'):
+ elif line.startswith('time') or line.startswith('tztime'):
in_time = True
elif line.startswith('}'):
in_general = False
|
if line . startswith ( 'general' ) : in_general = True elif line . startswith ( 'time' ) : in_time = True elif line . startswith ( '}' ) : in_general = False
|
if line . startswith ( 'general' ) : in_general = True elif line . startswith ( 'time' ) or line . startswith ( 'tztime' ) : in_time = True elif line . startswith ( '}' ) : in_general = False
|
LESS_SPECIFIC_IF
|
[["Insert", ["elif_clause", 3, 9, 4, 27], ["boolean_operator", "N0"], 1], ["Move", "N0", ["call", 3, 14, 3, 37], 0], ["Insert", "N0", ["or:or", "T"], 1], ["Insert", "N0", ["call", "N1"], 2], ["Insert", "N1", ["attribute", "N2"], 0], ["Insert", "N1", ["argument_list", "N3"], 1], ["Insert", "N2", ["identifier:line", "T"], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:startswith", "T"], 2], ["Insert", "N3", ["(:(", "T"], 0], ["Insert", "N3", ["string:'tztime'", "T"], 1], ["Insert", "N3", ["):)", "T"], 2]]
|
traitsui
|
a0bb3ae98440b665d6064198e6593fc430fccfac
|
5b3e86d1829c0bbddb9f3c24fb72d8926769ca2a
|
enthought/traits/ui/qt4/toolkit.py
|
https://github.com/tivek/traitsui
| true
| false
| true
|
@@ -325,7 +325,7 @@ class GUIToolkit ( Toolkit ):
if ui.control is not None:
ui.recycle()
ui.info.ui = ui
- ui.rebuild( ui, parent )
+ ui.rebuild( ui, ui.parent )
#---------------------------------------------------------------------------
# Sets the title for the UI window:
|
ui . rebuild ( ui , parent )
|
ui . rebuild ( ui , ui . parent )
|
SINGLE_STMT
|
[["Insert", ["argument_list", 3, 19, 3, 33], ["attribute", "N0"], 3], ["Insert", "N0", ["identifier:ui", "T"], 0], ["Insert", "N0", [".:.", "T"], 1], ["Move", "N0", ["identifier:parent", 3, 25, 3, 31], 2]]
|
xia2
|
87a5ccd438616df3a0023c09b0b5e18bc41c9b85
|
e797a904936de70d41353f287e2154b1ab7292ff
|
Experts/SymmetryExpert.py
|
https://github.com/xia2/xia2
| true
| false
| true
|
@@ -190,7 +190,7 @@ def symop_to_mat(symop):
symop2mat = Symop2mat()
return symop2mat.convert(symop)
-def lattice_to_spacegroup(lattice):
+def lattice_to_spacegroup_number(lattice):
'''Return the spacegroup number corresponding to the lowest symmetry
possible for a given Bravais lattice.'''
|
def lattice_to_spacegroup ( lattice ) : '''Return the spacegroup number corresponding to the lowest symmetry
possible for a given Bravais lattice.'''
|
def lattice_to_spacegroup_number ( lattice ) : '''Return the spacegroup number corresponding to the lowest symmetry
possible for a given Bravais lattice.'''
|
SINGLE_TOKEN
|
[["Update", ["identifier:lattice_to_spacegroup", 3, 5, 3, 26], "lattice_to_spacegroup_number"]]
|
LumiumDepotTools
|
8ec608527e095382a640f3cb1c30464ad1dce850
|
3a16ed155e3f7ac56db71f207d5779fc97c3bae8
|
recipe_modules/bot_update/resources/bot_update.py
|
https://github.com/LumiumBrowser/LumiumDepotTools
| true
| false
| false
|
@@ -679,7 +679,7 @@ def apply_gerrit_ref(gerrit_repo, gerrit_ref, root, gerrit_reset,
ok = False
git('checkout', '-b', temp_branch_name, cwd=root)
try:
- git('rebase', base_rev, cwd=root)
+ git('rebase', base_rev, cwd=root, tries=1)
except SubprocessFailed:
# Abort the rebase since there were failures.
git('rebase', '--abort', cwd=root)
|
git ( 'rebase' , base_rev , cwd = root )
|
git ( 'rebase' , base_rev , cwd = root , tries = 1 )
|
SAME_FUNCTION_MORE_ARGS
|
[["Insert", ["argument_list", 3, 14, 3, 44], [",:,", "T"], 6], ["Insert", ["argument_list", 3, 14, 3, 44], ["keyword_argument", "N0"], 7], ["Insert", "N0", ["identifier:tries", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["integer:1", "T"], 2]]
|
kobocat
|
fa00c2a96db3cd5579593ae4534050e83894c232
|
f634bebeac3e1e984e250f5a671321e0071cdf6d
|
onadata/apps/main/urls.py
|
https://github.com/kobotoolbox/kobocat
| true
| false
| false
|
@@ -126,7 +126,7 @@ urlpatterns = [
# django default stuff
url(r'^accounts/', include('onadata.apps.main.registration_urls')),
- url(r'^admin/', include(admin.site.urls)),
+ url(r'^admin/', admin.site.urls),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# oath2_provider
|
url ( r'^admin/' , include ( admin . site . urls ) ) ,
|
url ( r'^admin/' , admin . site . urls ) ,
|
SINGLE_STMT
|
[["Delete", ["(:(", 3, 8, 3, 9]], ["Delete", ["):)", 3, 45, 3, 46]]]
|
fassembler
|
cf5db3ee741055fad550e6844be3284762fa633e
|
c23c9e778fc4242457ba09470ef249bc079e6078
|
fassembler/topp_opencore.py
|
https://github.com/socialplanning/fassembler
| true
| false
| false
|
@@ -373,7 +373,7 @@ class OpenCoreProject(Project):
default='0',
help='Whether to start Zope in debug mode'),
Setting('email_confirmation',
- default='0',
+ default='1', # opencore ftests expect it turned on!
help='Whether to send email configuration'),
## FIXME: this could differ for different profiles
## e.g., there's another bundle at:
|
Setting ( 'email_confirmation' , default = '0' , help = 'Whether to send email configuration' ) ,
|
Setting ( 'email_confirmation' , default = '1' , help = 'Whether to send email configuration' ) ,
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'0'", 3, 25, 3, 28], "'1'"]]
|
electrum-nmc
|
0fb0828b36141ca7fa1dacfaf8b0d6a2613b4f5a
|
084ca7246d0dc5bff6c7173590ba25e2c6dc04ec
|
electrum_nmc/electrum/commands.py
|
https://github.com/namecoin/electrum-nmc
| true
| false
| true
|
@@ -1386,7 +1386,7 @@ class Commands:
raise Exception("Unknown transaction (txid {})".format(txid))
if tx.txid() != txid:
- raise Exception("txid mismatch")
+ raise Exception("txid mismatch ({} vs {})".format(tx.txid(), txid))
# the tx is now verified to come from a safe height in the blockchain
|
raise Exception ( "txid mismatch" )
|
raise Exception ( "txid mismatch ({} vs {})" . format ( tx . txid ( ) , txid ) )
|
SINGLE_STMT
|
[["Insert", ["argument_list", 3, 28, 3, 45], ["call", "N0"], 1], ["Insert", ["argument_list", 3, 28, 3, 45], ["):)", "T"], 2], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Insert", "N1", ["string:\"txid mismatch ({} vs {})\"", "T"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:format", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["call", "N3"], 1], ["Insert", "N2", [",:,", "T"], 2], ["Insert", "N2", ["identifier:txid", "T"], 3], ["Insert", "N2", ["):)", "T"], 4], ["Insert", "N3", ["attribute", "N4"], 0], ["Insert", "N3", ["argument_list", "N5"], 1], ["Insert", "N4", ["identifier:tx", "T"], 0], ["Insert", "N4", [".:.", "T"], 1], ["Insert", "N4", ["identifier:txid", "T"], 2], ["Insert", "N5", ["(:(", "T"], 0], ["Move", "N5", ["):)", 3, 44, 3, 45], 1], ["Delete", ["string:\"txid mismatch\"", 3, 29, 3, 44]]]
|
CoCoNet
|
898507e5d00e7a38d4b3f5ec11cd442c77f14d02
|
44b6d5bcfda3259fbf144a381aad02b9b63f4df2
|
coconet/parser.py
|
https://github.com/Puumanamana/CoCoNet
| true
| false
| true
|
@@ -120,7 +120,7 @@ def parse_args():
preproc_parser.add_argument(
'--min-ctg-len', type=int, default=2048,
- help='Minimum contig length. Default is 2048'
+ help='Minimum contig length'
)
preproc_parser.add_argument(
'--min-prevalence', type=int, default=2,
|
preproc_parser . add_argument ( '--min-ctg-len' , type = int , default = 2048 , help = 'Minimum contig length. Default is 2048' )
|
preproc_parser . add_argument ( '--min-ctg-len' , type = int , default = 2048 , help = 'Minimum contig length' )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'Minimum contig length. Default is 2048'", 3, 12, 3, 52], "'Minimum contig length'"]]
|
purvar-agent
|
7c3dba086da5e133d3f27cde36260e857eb73118
|
f122e1c0302ef3955a78a95330c13c5144278dfb
|
checks.d/haproxy.py
|
https://github.com/jyogi/purvar-agent
| true
| false
| true
|
@@ -74,7 +74,7 @@ class HAProxy(AgentCheck):
authhandler = urllib2.HTTPBasicAuthHandler(passman)
opener = urllib2.build_opener(authhandler)
urllib2.install_opener(opener)
- url = "%s%s" % (url, STATS_URL)
+ url = urllib2.urlparse.urljoin(url, STATS_URL)
self.log.debug("HAProxy Fetching haproxy search data from: %s" % url)
|
url = "%s%s" % ( url , STATS_URL )
|
url = urllib2 . urlparse . urljoin ( url , STATS_URL )
|
SINGLE_STMT
|
[["Insert", ["assignment", 3, 9, 3, 40], ["call", "N0"], 2], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Insert", "N1", ["attribute", "N3"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:urljoin", "T"], 2], ["Move", "N2", ["(:(", 3, 24, 3, 25], 0], ["Move", "N2", ["identifier:url", 3, 25, 3, 28], 1], ["Move", "N2", [",:,", 3, 28, 3, 29], 2], ["Move", "N2", ["identifier:STATS_URL", 3, 30, 3, 39], 3], ["Move", "N2", ["):)", 3, 39, 3, 40], 4], ["Insert", "N3", ["identifier:urllib2", "T"], 0], ["Insert", "N3", [".:.", "T"], 1], ["Insert", "N3", ["identifier:urlparse", "T"], 2], ["Delete", ["string:\"%s%s\"", 3, 15, 3, 21]], ["Delete", ["%:%", 3, 22, 3, 23]], ["Delete", ["tuple", 3, 24, 3, 40]], ["Delete", ["binary_operator", 3, 15, 3, 40]]]
|
osis-partnership
|
475000ba21e2b2642b1d38b5d004bb54f3994006
|
32f7bb7f2a636203f0417b58c8a198b6f121d34f
|
api/serializers.py
|
https://github.com/uclouvain/osis-partnership
| true
| false
| false
|
@@ -123,7 +123,7 @@ class PartnershipSerializer(serializers.ModelSerializer):
)
partner = PartnerSerializer()
partner_entity = serializers.CharField(source='partner_entity.name', allow_null=True)
- supervisor = serializers.CharField(allow_null=True)
+ supervisor = serializers.CharField(source='get_supervisor', allow_null=True)
ucl_sector = serializers.CharField(source='sector_most_recent_acronym', allow_null=True)
ucl_university = EntitySerializer()
ucl_university_labo = EntitySerializer()
|
supervisor = serializers . CharField ( allow_null = True )
|
supervisor = serializers . CharField ( source = 'get_supervisor' , allow_null = True )
|
SAME_FUNCTION_MORE_ARGS
|
[["Insert", ["argument_list", 3, 39, 3, 56], ["keyword_argument", "N0"], 1], ["Insert", ["argument_list", 3, 39, 3, 56], [",:,", "T"], 2], ["Insert", "N0", ["identifier:source", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["string:'get_supervisor'", "T"], 2]]
|
carbon
|
f55b9236e88f9f7daa39a12b9612d29f30199846
|
dfbaf385810f974ff32ed3705d91237f7ada10fb
|
lib/carbon/aggregator/buffers.py
|
https://github.com/slackhappy/carbon
| true
| false
| true
|
@@ -64,7 +64,7 @@ class MetricBuffer:
buffer.mark_inactive()
if buffer.interval < age_threshold:
- del self.interval_values[interval]
+ del self.interval_values[buffer.interval]
def close(self):
if self.compute_task and self.compute_task.running:
|
del self . interval_values [ interval ]
|
del self . interval_values [ buffer . interval ]
|
SINGLE_STMT
|
[["Insert", ["subscript", 3, 13, 3, 43], ["attribute", "N0"], 2], ["Insert", "N0", ["identifier:buffer", "T"], 0], ["Insert", "N0", [".:.", "T"], 1], ["Move", "N0", ["identifier:interval", 3, 34, 3, 42], 2]]
|
scipy_proceedings
|
774d3d67203647e488cb0bde5c36e6698a90695f
|
d1c459c0b04ed84d6487b15468375a1fb4cfdc3e
|
publisher/build_paper.py
|
https://github.com/aterrel/scipy_proceedings
| true
| false
| false
|
@@ -124,7 +124,7 @@ out = open(os.path.join(out_path, 'paper.tex'), 'w')
out.write(tex)
out.close()
-page_nr_f = os.path.join(out_path, 'page_numers.tex')
+page_nr_f = os.path.join(out_path, 'page_numbers.tex')
if not os.path.exists(page_nr_f):
out = open(page_nr_f, 'w')
out.close()
|
page_nr_f = os . path . join ( out_path , 'page_numers.tex' )
|
page_nr_f = os . path . join ( out_path , 'page_numbers.tex' )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'page_numers.tex'", 3, 36, 3, 53], "'page_numbers.tex'"]]
|
django
|
145467a63688eb9055d176d84655cc029060da30
|
01f2cf2aecc932d43b20b55fc19a8fa440457b5f
|
tests/migrations/test_operations.py
|
https://github.com/oscaro/django
| true
| false
| true
|
@@ -80,7 +80,7 @@ class OperationTestBase(MigrationTestBase):
# Make the "current" state
model_options = {
"swappable": "TEST_SWAP_MODEL",
- "index_together": [["pink", "weight"]] if index_together else [],
+ "index_together": [["weight", "pink"]] if index_together else [],
"unique_together": [["pink", "weight"]] if unique_together else [],
}
|
model_options = { "swappable" : "TEST_SWAP_MODEL" , "index_together" : [ [ "pink" , "weight" ] ] if index_together else [ ] , "unique_together" : [ [ "pink" , "weight" ] ] if unique_together else [ ] , }
|
model_options = { "swappable" : "TEST_SWAP_MODEL" , "index_together" : [ [ "weight" , "pink" ] ] if index_together else [ ] , "unique_together" : [ [ "pink" , "weight" ] ] if unique_together else [ ] , }
|
SINGLE_STMT
|
[["Move", ["string:\"pink\"", 3, 33, 3, 39], ["list", 3, 32, 3, 50], 3], ["Move", [",:,", 3, 39, 3, 40], ["list", 3, 32, 3, 50], 4]]
|