project
stringlengths 1
98
| commit_sha
stringlengths 40
40
| parent_sha
stringlengths 40
40
| file_path
stringlengths 4
209
| project_url
stringlengths 23
132
| likely_bug
bool 1
class | comodified
bool 1
class | in_function
bool 2
classes | diff
stringlengths 27
9.71k
| before
stringlengths 1
5.36k
⌀ | after
stringlengths 1
6k
⌀ | sstub_pattern
stringclasses 23
values | edit_script
stringlengths 33
158k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|
anaconda
|
5ec6bdead674c490a3a3584ca10b9c7e699d2f7e
|
141b014089ca19654f446653c9686c91381fc0e2
|
storage/devices.py
|
https://github.com/vojtechtrefny/anaconda
| true
| false
| true
|
@@ -2104,7 +2104,7 @@ class MDRaidArrayDevice(StorageDevice):
if self.level is None or self.memberDevices is None or not self.uuid:
raise DeviceError("array is not fully defined")
- fmt = "ARRAY level=%s num-devices=%d UUID=%s"
+ fmt = "ARRAY level=raid%d num-devices=%d UUID=%s\n"
return fmt % (self.level, self.memberDevices, self.uuid)
@property
|
fmt = "ARRAY level=%s num-devices=%d UUID=%s"
|
fmt = "ARRAY level=raid%d num-devices=%d UUID=%s\n"
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:\"ARRAY level=%s num-devices=%d UUID=%s\"", 3, 15, 3, 54], "\"ARRAY level=raid%d num-devices=%d UUID=%s\\n\""]]
|
conda-mirror
|
26b60137edf4b58353fc2fe289094d9d45a31e12
|
a5601f0e44695d6444a204f0bc6c24c985455619
|
conda_mirror/conda_mirror.py
|
https://github.com/jakirkham/conda-mirror
| true
| false
| true
|
@@ -301,7 +301,7 @@ def _validate(filename, md5=None, size=None):
try:
t = tarfile.open(filename)
t.extractfile('info/index.json').read().decode('utf-8')
- except tarfile.TarError:
+ except (tarfile.TarError, EOFError):
logger.info("Validation failed because conda package is corrupted.",
exc_info=True)
return _remove_package(filename, reason="Tarfile read failure")
|
try : t = tarfile . open ( filename ) t . extractfile ( 'info/index.json' ) . read ( ) . decode ( 'utf-8' ) except tarfile . TarError : logger . info ( "Validation failed because conda package is corrupted." , exc_info = True ) return _remove_package ( filename , reason = "Tarfile read failure" )
|
try : t = tarfile . open ( filename ) t . extractfile ( 'info/index.json' ) . read ( ) . decode ( 'utf-8' ) except ( tarfile . TarError , EOFError ) : logger . info ( "Validation failed because conda package is corrupted." , exc_info = True ) return _remove_package ( filename , reason = "Tarfile read failure" )
|
SINGLE_STMT
|
[["Insert", ["except_clause", 3, 5, 6, 72], ["tuple", "N0"], 1], ["Insert", "N0", ["(:(", "T"], 0], ["Move", "N0", ["attribute", 3, 12, 3, 28], 1], ["Insert", "N0", [",:,", "T"], 2], ["Insert", "N0", ["identifier:EOFError", "T"], 3], ["Insert", "N0", ["):)", "T"], 4]]
|
ermrest
|
a619dfd395a118ad40533da24006f26d4f72f4f0
|
0de46a97da404abe0d00e4900f054ccc44d03892
|
test/resttest/joins.py
|
https://github.com/informatics-isi-edu/ermrest
| true
| false
| true
|
@@ -140,7 +140,7 @@ class JoinedProjections (common.ErmrestTest):
class MultiKeyReference (common.ErmrestTest):
def test_implicit_multi(self):
# regression test for ermrest#160, internal server error with MultiKeyReference
- self.assertHttp(self.session.get('entity/%(T1)s/%(T2b)s' % {'T1': _T1, 'T2b': _T2b}), 200)
+ self.assertHttp(self.session.get('entity/%(S)s:%(T1)s/%(S)s:%(T2b)s' % {'T1': _T1, 'T2b': _T2b, 'S': _S}), 200)
if __name__ == '__main__':
unittest.main(verbosity=2)
|
self . assertHttp ( self . session . get ( 'entity/%(T1)s/%(T2b)s' % { 'T1' : _T1 , 'T2b' : _T2b } ) , 200 )
|
self . assertHttp ( self . session . get ( 'entity/%(S)s:%(T1)s/%(S)s:%(T2b)s' % { 'T1' : _T1 , 'T2b' : _T2b , 'S' : _S } ) , 200 )
|
SINGLE_STMT
|
[["Update", ["string:'entity/%(T1)s/%(T2b)s'", 3, 42, 3, 65], "'entity/%(S)s:%(T1)s/%(S)s:%(T2b)s'"], ["Insert", ["dictionary", 3, 68, 3, 92], [",:,", "T"], 4], ["Insert", ["dictionary", 3, 68, 3, 92], ["pair", "N0"], 5], ["Insert", "N0", ["string:'S'", "T"], 0], ["Insert", "N0", [":::", "T"], 1], ["Insert", "N0", ["identifier:_S", "T"], 2]]
|
isida3
|
2c7c4bd1d5786f18ae837c0154a27179709d9917
|
d142ab87dbdedc2dab967a36a67bea9718c9282f
|
plugins/acl.py
|
https://github.com/disabler/isida3
| true
| false
| true
|
@@ -177,7 +177,7 @@ def acl_presence(room,jid,nick,type,mass):
if a:
for tmp in a:
if tmp[0] == 'age':
- in_base = cur_execute_fetchall('select time,sum(age),status from age where room=%s and jid=%s order by status',(room,getRoom(jid)))
+ in_base = cur_execute_fetchall('select time,sum(age),status from age where room=%s and jid=%s',(room,getRoom(jid)))
if not in_base: r_age = 0
else:
try:
|
in_base = cur_execute_fetchall ( 'select time,sum(age),status from age where room=%s and jid=%s order by status' , ( room , getRoom ( jid ) ) )
|
in_base = cur_execute_fetchall ( 'select time,sum(age),status from age where room=%s and jid=%s' , ( room , getRoom ( jid ) ) )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'select time,sum(age),status from age where room=%s and jid=%s order by status'", 3, 36, 3, 115], "'select time,sum(age),status from age where room=%s and jid=%s'"]]
|
2019-01-14-uwmadison-swc
|
f82fed8a8cb8d75147733eb9057a6a6c453a2007
|
f55a015a22ce02ecce3ec22b095a89756847002f
|
bin/util.py
|
https://github.com/UW-Madison-DataScience/2019-01-14-uwmadison-swc
| true
| false
| true
|
@@ -160,7 +160,7 @@ def load_yaml(filename):
try:
with open(filename, 'r') as reader:
return yaml.load(reader)
- except (yaml.YAMLError, FileNotFoundError) as e:
+ except (yaml.YAMLError, IOError) as e:
print('Unable to load YAML file {0}:\n{1}'.format(filename, e), file=sys.stderr)
sys.exit(1)
|
try : with open ( filename , 'r' ) as reader : return yaml . load ( reader ) except ( yaml . YAMLError , FileNotFoundError ) as e : print ( 'Unable to load YAML file {0}:\n{1}' . format ( filename , e ) , file = sys . stderr ) sys . exit ( 1 )
|
try : with open ( filename , 'r' ) as reader : return yaml . load ( reader ) except ( yaml . YAMLError , IOError ) as e : print ( 'Unable to load YAML file {0}:\n{1}' . format ( filename , e ) , file = sys . stderr ) sys . exit ( 1 )
|
CHANGE_IDENTIFIER_USED
|
[["Update", ["identifier:FileNotFoundError", 3, 29, 3, 46], "IOError"]]
|
ansible
|
c0a677aa3a35e6204fb9d86608fce68356ef08e7
|
002ce4b3520167a8e70a3eefea061c85c4f70491
|
lib/ansible/modules/extras/cloud/misc/proxmox.py
|
https://github.com/ozboms/ansible
| true
| false
| true
|
@@ -290,7 +290,7 @@ def main():
password = dict(no_log=True),
hostname = dict(),
ostemplate = dict(),
- disk = dict(type='int', default=3),
+ disk = dict(type='str', default='3'),
cpus = dict(type='int', default=1),
memory = dict(type='int', default=512),
swap = dict(type='int', default=0),
|
disk = dict ( type = 'int' , default = 3 ) ,
|
disk = dict ( type = 'str' , default = '3' ) ,
|
SINGLE_STMT
|
[["Update", ["string:'int'", 3, 24, 3, 29], "'str'"], ["Insert", ["keyword_argument", 3, 31, 3, 40], ["string:'3'", "T"], 2], ["Delete", ["integer:3", 3, 39, 3, 40]]]
|
twisted
|
9a3d0d2921ec234c9c9e21996761793126512933
|
a4663e99b4cd8c7863312ca48aeb7fd0568613a0
|
twisted/test/test_unix.py
|
https://github.com/PreVeil/twisted
| true
| false
| true
|
@@ -74,7 +74,7 @@ class PortCleanerUpper(unittest.TestCase):
wait(d)
else:
try:
- util.spinUntil(lambda :p.disconnected)
+ spinUntil(lambda :p.disconnected)
except:
failure.Failure().printTraceback()
|
else : try : util . spinUntil ( lambda : p . disconnected )
|
else : try : spinUntil ( lambda : p . disconnected )
|
SINGLE_STMT
|
[["Move", ["call", 3, 25, 3, 63], ["identifier:spinUntil", 3, 30, 3, 39], 0], ["Delete", ["identifier:util", 3, 25, 3, 29]], ["Delete", [".:.", 3, 29, 3, 30]], ["Delete", ["attribute", 3, 25, 3, 39]]]
|
deb-aodh
|
75e46a5c67a21cd6d34d9bc29dfa8529049512c8
|
29f5edad8ee6a1949839e946ab60861c141702e5
|
tools/make_test_data.py
|
https://github.com/openstack/deb-aodh
| true
| false
| true
|
@@ -33,7 +33,7 @@ from ceilometer.openstack.common import timeutils
def main():
- cfg.CONF([])
+ cfg.CONF([], project='ceilometer')
parser = argparse.ArgumentParser(
description='generate metering data',
|
cfg . CONF ( [ ] )
|
cfg . CONF ( [ ] , project = 'ceilometer' )
|
SAME_FUNCTION_MORE_ARGS
|
[["Insert", ["argument_list", 3, 13, 3, 17], [",:,", "T"], 2], ["Insert", ["argument_list", 3, 13, 3, 17], ["keyword_argument", "N0"], 3], ["Insert", "N0", ["identifier:project", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["string:'ceilometer'", "T"], 2]]
|
persian-twitter-day
|
18ec01bc49f397b550220ea9e698e01d70db7c61
|
6c1a8579b31a1b4226bbdc50f1f9b72d8930b464
|
tw_analysis/settings/base.py
|
https://github.com/mehotkhan/persian-twitter-day
| true
| false
| false
|
@@ -99,7 +99,7 @@ USE_TZ = True
# https://docs.djangoproject.com/en/1.11/howto/static-files/
# PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
-STATIC_ROOT = BASE_DIR + r'\static' 'static'
+STATIC_ROOT = BASE_DIR + r'\static'
STATIC_URL = '/static/'
CELERY_BROKER_URL = REDIS
CELERY_RESULT_BACKEND = REDIS
|
STATIC_ROOT = BASE_DIR + r'\static' 'static'
|
STATIC_ROOT = BASE_DIR + r'\static'
|
CHANGE_BINARY_OPERAND
|
[["Move", ["binary_operator", 3, 15, 3, 45], ["string:r'\\static'", 3, 26, 3, 36], 2], ["Delete", ["string:'static'", 3, 37, 3, 45]], ["Delete", ["concatenated_string", 3, 26, 3, 45]]]
|
sp
|
8b3488eddf0089f72d6aece11db4200130ee06d2
|
f2574e85bff7f0b3bb02969283d6fd1142c020e5
|
frontend/models.py
|
https://github.com/the0forge/sp
| true
| false
| true
|
@@ -302,7 +302,7 @@ class PriceLevel(models.Model):
super(PriceLevel, self).save(*args, **kwargs)
def __unicode__(self):
- return self.price_level_group
+ return 'Level #%s (Group: %s)' % (self.pk, self.price_level_group or '')
class Order(models.Model):
|
return self . price_level_group
|
return 'Level #%s (Group: %s)' % ( self . pk , self . price_level_group or '' )
|
SINGLE_STMT
|
[["Insert", ["return_statement", 3, 9, 3, 38], ["binary_operator", "N0"], 1], ["Insert", "N0", ["string:'Level #%s (Group: %s)'", "T"], 0], ["Insert", "N0", ["%:%", "T"], 1], ["Insert", "N0", ["tuple", "N1"], 2], ["Insert", "N1", ["(:(", "T"], 0], ["Insert", "N1", ["attribute", "N2"], 1], ["Insert", "N1", [",:,", "T"], 2], ["Insert", "N1", ["boolean_operator", "N3"], 3], ["Insert", "N1", ["):)", "T"], 4], ["Insert", "N2", ["identifier:self", "T"], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:pk", "T"], 2], ["Move", "N3", ["attribute", 3, 16, 3, 38], 0], ["Insert", "N3", ["or:or", "T"], 1], ["Insert", "N3", ["string:''", "T"], 2]]
|
pyrocko
|
f8c8c8d851a54debae05b683c3b2b7560d36debe
|
4e346d7485700f13f15cbf5e300c4adb14879e89
|
src/fdsn/station.py
|
https://github.com/hvasbath/pyrocko
| true
| false
| false
|
@@ -528,7 +528,7 @@ class Comment(Object):
value = Unicode.T(xmltagname='Value')
begin_effective_time = Timestamp.T(optional=True,
xmltagname='BeginEffectiveTime')
- end_effective_time = Timestamp.T(optional=True,
+ end_effective_time = DummyAwareOptionalTimestamp.T(optional=True,
xmltagname='EndEffectiveTime')
author_list = List.T(Person.T(xmltagname='Author'))
|
end_effective_time = Timestamp . T ( optional = True , xmltagname = 'EndEffectiveTime' )
|
end_effective_time = DummyAwareOptionalTimestamp . T ( optional = True , xmltagname = 'EndEffectiveTime' )
|
SAME_FUNCTION_WRONG_CALLER
|
[["Update", ["identifier:Timestamp", 3, 26, 3, 35], "DummyAwareOptionalTimestamp"]]
|
NotiHub
|
86be64ea1b75b90ac14f735d1217d43503c6882f
|
8c21714916a372eaea39869a43d5e4c22481309e
|
NotiHub/services/__init__.py
|
https://github.com/bearbear12345/NotiHub
| true
| false
| false
|
@@ -20,4 +20,4 @@ for _, servicenamefull, _ in pkgutil.walk_packages(path=pkgutil.extend_path(__pa
version[servicename] = ""
globals()[servicename] = service.service
except Exception as e:
- dprint(" Error importing " + servicenamefull + " - " + e)
+ dprint(" Error importing " + servicenamefull + " - " + str(e))
|
except Exception as e : dprint ( " Error importing " + servicenamefull + " - " + e )
|
except Exception as e : dprint ( " Error importing " + servicenamefull + " - " + str ( e ) )
|
ADD_FUNCTION_AROUND_EXPRESSION
|
[["Insert", ["binary_operator", 3, 16, 3, 68], ["call", "N0"], 2], ["Insert", "N0", ["identifier:str", "T"], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Insert", "N1", ["(:(", "T"], 0], ["Move", "N1", ["identifier:e", 3, 67, 3, 68], 1], ["Insert", "N1", ["):)", "T"], 2]]
|
phileas_edxhawthorn2
|
a1af17e4b1d69c3f1edea1bd5b9e77227b3a86e7
|
b211e9fd368c2b10d880e42395604cfad0f5cf79
|
pavelib/assets.py
|
https://github.com/TheMOOCAgency/phileas_edxhawthorn2
| true
| false
| true
|
@@ -143,7 +143,7 @@ def compile_sass(debug=False):
if debug:
parts.append("--sourcemap")
else:
- parts.append("--style compressed")
+ parts.append("--style compressed --quiet")
for load_path in SASS_LOAD_PATHS + SASS_DIRS.keys():
parts.append("--load-path {path}".format(path=load_path))
|
parts . append ( "--style compressed" )
|
parts . append ( "--style compressed --quiet" )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:\"--style compressed\"", 3, 22, 3, 42], "\"--style compressed --quiet\""]]
|
deployment-support
|
052f339c9b42d1db24060d464a625d7454533075
|
1c1131d71535e430535ba140d449361e236db70a
|
openstack/fuel/big_patch.py
|
https://github.com/nmadhok/deployment-support
| true
| false
| false
|
@@ -892,7 +892,7 @@ ini_setting {"auto_failover":
path => $neutron_conf_path,
section => 'DEFAULT',
setting => 'allow_automatic_l3agent_failover',
- value => 'True',
+ value => 'False',
ensure => present,
notify => Exec['restartneutronservices'],
}
|
value = > 'True' ,
|
value = > 'False' ,
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'True'", 3, 14, 3, 20], "'False'"]]
|
pylearn2
|
5ee03803b9607885f06aa14dc6a1d1e4bd8f0488
|
e5a308ed8f9c9dff7710a52889b92b0eaa36e4ef
|
pylearn2/kmeans.py
|
https://github.com/awesome-python/pylearn2
| true
| false
| true
|
@@ -98,7 +98,7 @@ class KMeans(Block, Model):
except MemoryError:
raise TypicalMemoryError("dying trying to allocate dists "
"matrix for {0} examples and {1} "
- "means\n".format(n, k))
+ "means".format(n, k))
old_kills = {}
|
MemoryError : raise TypicalMemoryError ( "dying trying to allocate dists " "matrix for {0} examples and {1} " "means\n" . format ( n , k ) )
|
MemoryError : raise TypicalMemoryError ( "dying trying to allocate dists " "matrix for {0} examples and {1} " "means" . format ( n , k ) )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:\"means\\n\"", 3, 42, 3, 51], "\"means\""]]
|
py-translate
|
a409eea0465d56af814902f5250a0bb1708d0a64
|
b7cbd9507141235d29a86f343c5b3b3e1c01b809
|
translate/languages.py
|
https://github.com/saradindusengupta/py-translate
| true
| false
| true
|
@@ -22,4 +22,4 @@ def language_codes(lang):
codes = load_codes('supported_translations.json')[lang]
for code in codes:
- print(code['name'].ljust(23), code['language'].ljust(5))
+ print(code['name'].ljust(23), '\t', code['language'].ljust(5))
|
print ( code [ 'name' ] . ljust ( 23 ) , code [ 'language' ] . ljust ( 5 ) )
|
print ( code [ 'name' ] . ljust ( 23 ) , '\t' , code [ 'language' ] . ljust ( 5 ) )
|
SAME_FUNCTION_MORE_ARGS
|
[["Insert", ["argument_list", 2, 14, 2, 65], ["string:'\\t'", "T"], 3], ["Insert", ["argument_list", 2, 14, 2, 65], [",:,", "T"], 4]]
|
ansible-modules-core
|
788230c43e22e6433d8ff23b67fc828315497b0b
|
f7062fc6d26d86ecb3da44802406fe43cb59dbfe
|
packaging/os/yum.py
|
https://github.com/gundalow/ansible-modules-core
| true
| false
| false
|
@@ -768,7 +768,7 @@ def ensure(module, state, pkgspec, conf_file, enablerepo, disablerepo,
a = rid.repoXML.repoid
current_repos = new_repos
except yum.Errors.YumBaseError, e:
- module.fail_json(msg="Error setting/accessing repo %s: %s" % (r, e))
+ module.fail_json(msg="Error setting/accessing repos: %s" % (e))
except yum.Errors.YumBaseError, e:
module.fail_json(msg="Error accessing repos: %s" % e)
if state in ['installed', 'present']:
|
except yum . Errors . YumBaseError , e : module . fail_json ( msg = "Error setting/accessing repo %s: %s" % ( r , e ) )
|
except yum . Errors . YumBaseError , e : module . fail_json ( msg = "Error setting/accessing repos: %s" % ( e ) )
|
SINGLE_STMT
|
[["Update", ["string:\"Error setting/accessing repo %s: %s\"", 3, 42, 3, 79], "\"Error setting/accessing repos: %s\""], ["Insert", ["binary_operator", 3, 42, 3, 88], ["parenthesized_expression", "N0"], 2], ["Move", "N0", ["(:(", 3, 82, 3, 83], 0], ["Move", "N0", ["identifier:e", 3, 86, 3, 87], 1], ["Move", "N0", ["):)", 3, 87, 3, 88], 2], ["Delete", ["identifier:r", 3, 83, 3, 84]], ["Delete", [",:,", 3, 84, 3, 85]], ["Delete", ["tuple", 3, 82, 3, 88]]]
|
favaz-erp
|
fc60335620fabaceed760b4187c8a1650ff0c54f
|
3dfa4401ff9820a1c5059781427735a89804bf9e
|
erpnext/hr/doctype/leave_application/leave_application.py
|
https://github.com/sagminsolutions/favaz-erp
| true
| false
| true
|
@@ -30,7 +30,7 @@ class DocType:
def get_leave_balance(self):
leave_all = sql("select total_leaves_allocated from `tabLeave Allocation` where employee = '%s' and leave_type = '%s' and fiscal_year = '%s' and docstatus = 1" % (self.doc.employee, self.doc.leave_type, self.doc.fiscal_year))
leave_all = leave_all and flt(leave_all[0][0]) or 0
- leave_app = sql("select total_leave_days from `tabLeave Application` where employee = '%s' and leave_type = '%s' and fiscal_year = '%s' and docstatus = 1" % (self.doc.employee, self.doc.leave_type, self.doc.fiscal_year))
+ leave_app = sql("select SUM(total_leave_days) from `tabLeave Application` where employee = '%s' and leave_type = '%s' and fiscal_year = '%s' and docstatus = 1" % (self.doc.employee, self.doc.leave_type, self.doc.fiscal_year))
leave_app = leave_app and flt(leave_app[0][0]) or 0
ret = {'leave_balance':leave_all - leave_app}
return ret
|
leave_app = sql ( "select total_leave_days from `tabLeave Application` where employee = '%s' and leave_type = '%s' and fiscal_year = '%s' and docstatus = 1" % ( self . doc . employee , self . doc . leave_type , self . doc . fiscal_year ) )
|
leave_app = sql ( "select SUM(total_leave_days) from `tabLeave Application` where employee = '%s' and leave_type = '%s' and fiscal_year = '%s' and docstatus = 1" % ( self . doc . employee , self . doc . leave_type , self . doc . fiscal_year ) )
|
CHANGE_BINARY_OPERAND
|
[["Update", ["string:\"select total_leave_days from `tabLeave Application` where employee = '%s' and leave_type = '%s' and fiscal_year = '%s' and docstatus = 1\"", 3, 19, 3, 157], "\"select SUM(total_leave_days) from `tabLeave Application` where employee = '%s' and leave_type = '%s' and fiscal_year = '%s' and docstatus = 1\""]]
|
erpnext
|
2a0e3e3515ed4b25cbbe771ec39d5472aa9d630e
|
9d81f97fe677d0ac92064fc9a4e37bf4f5d4a6a0
|
erpnext/accounts/doctype/tax_withholding_category/tax_withholding_category.py
|
https://github.com/szufisher/erpnext
| true
| false
| true
|
@@ -58,7 +58,7 @@ def get_tax_withholding_details(tax_withholding_category, fiscal_year, company):
"rate": tax_rate_detail.tax_withholding_rate,
"threshold": tax_rate_detail.single_threshold,
"cumulative_threshold": tax_rate_detail.cumulative_threshold,
- "description": tax_withholding.category_name.strip() if tax_withholding.category_name.strip() else tax_withholding_category
+ "description": tax_withholding.category_name if tax_withholding.category_name else tax_withholding_category
})
def get_tax_withholding_rates(tax_withholding, fiscal_year):
|
tax_withholding . category_name . strip ( ) if tax_withholding . category_name . strip ( ) else tax_withholding_category
|
tax_withholding . category_name if tax_withholding . category_name else tax_withholding_category
|
SINGLE_STMT
|
[["Move", ["conditional_expression", 3, 20, 3, 128], ["attribute", 3, 20, 3, 49], 0], ["Move", ["conditional_expression", 3, 20, 3, 128], ["attribute", 3, 61, 3, 90], 3], ["Delete", [".:.", 3, 49, 3, 50]], ["Delete", ["identifier:strip", 3, 50, 3, 55]], ["Delete", ["attribute", 3, 20, 3, 55]], ["Delete", ["(:(", 3, 55, 3, 56]], ["Delete", ["):)", 3, 56, 3, 57]], ["Delete", ["argument_list", 3, 55, 3, 57]], ["Delete", ["call", 3, 20, 3, 57]], ["Delete", [".:.", 3, 90, 3, 91]], ["Delete", ["identifier:strip", 3, 91, 3, 96]], ["Delete", ["attribute", 3, 61, 3, 96]], ["Delete", ["(:(", 3, 96, 3, 97]], ["Delete", ["):)", 3, 97, 3, 98]], ["Delete", ["argument_list", 3, 96, 3, 98]], ["Delete", ["call", 3, 61, 3, 98]]]
|
hdx-ckan
|
8ccc414bd222146ff9d34576f00954228ad9c92e
|
9a3c682a67eb02600aa2a53f0bd003b6b9a570b9
|
ckan/controllers/home.py
|
https://github.com/teodorescuserban/hdx-ckan
| true
| false
| true
|
@@ -68,7 +68,7 @@ class HomeController(BaseController):
'res_format': _('Formats'),
'license': _('Licence'), }
- data_dict = {'order_by': 'packages', 'all_fields': 1}
+ data_dict = {'sort': 'packages', 'all_fields': 1}
# only give the terms to group dictize that are returned in the
# facets as full results take a lot longer
if 'groups' in c.search_facets:
|
data_dict = { 'order_by' : 'packages' , 'all_fields' : 1 }
|
data_dict = { 'sort' : 'packages' , 'all_fields' : 1 }
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'order_by'", 3, 26, 3, 36], "'sort'"]]
|
cpython
|
b5fa74dc112d06f3fdd4f83bcba32bf518b94691
|
7147592a9e9886cb19c7a38fe45261f75c4d2cb5
|
Lib/lib-old/poly.py
|
https://github.com/noirbizarre/cpython
| true
| false
| true
|
@@ -7,7 +7,7 @@
def normalize(p): # Strip unnecessary zero coefficients
n = len(p)
- while p:
+ while n:
if p[n-1]: return p[:n]
n = n-1
return []
|
while p : if p [ n - 1 ] : return p [ : n ] n = n - 1
|
while n : if p [ n - 1 ] : return p [ : n ] n = n - 1
|
CHANGE_IDENTIFIER_USED
|
[["Update", ["identifier:p", 3, 8, 3, 9], "n"]]
|
binderhub
|
67ce6d803441198ef132299370ee145901239404
|
e96241e13192d13b9241bfe7c40f37b942302504
|
binderhub/repoproviders.py
|
https://github.com/gesiscss/binderhub
| true
| false
| true
|
@@ -380,7 +380,7 @@ class GitHubGistRepoProvider(GitHubRepoProvider):
parts = self.spec.split('/')
self.user, self.gist_id, *_ = parts
if len(parts) > 2:
- self.unresolved_ref = parts[1]
+ self.unresolved_ref = parts[2]
else:
self.unresolved_ref = ''
|
self . unresolved_ref = parts [ 1 ]
|
self . unresolved_ref = parts [ 2 ]
|
CHANGE_NUMERIC_LITERAL
|
[["Update", ["integer:1", 3, 41, 3, 42], "2"]]
|
cozy
|
4777810e9529a30c3e46a7f052a6c6096980f893
|
2a861518e309a22ced6f084dff6628f043f76908
|
cozy/evaluation.py
|
https://github.com/CozySynthesizer/cozy
| true
| false
| true
|
@@ -727,7 +727,7 @@ def _compile(e, env : {str:int}, out, bind_callback):
out.append(initialize)
out.append(loop)
elif isinstance(e, EMakeMap2):
- _compile(EMap(e.e, ELambda(e.value.arg, ETuple((e.value.arg, e.value.body)))).with_type(TBag(TTuple((e.value.arg.type, e.value.body.type)))), env, out, bind_callback=bind_callback)
+ _compile(EMap(e.e, ELambda(e.value.arg, ETuple((e.value.arg, e.value.body)).with_type(TTuple((e.value.arg.type, e.value.body.type))))).with_type(TBag(TTuple((e.value.arg.type, e.value.body.type)))), env, out, bind_callback=bind_callback)
default = mkval(e.type.v)
def make_map(stk):
res = Map(e.type, default)
|
_compile ( EMap ( e . e , ELambda ( e . value . arg , ETuple ( ( e . value . arg , e . value . body ) ) ) ) . with_type ( TBag ( TTuple ( ( e . value . arg . type , e . value . body . type ) ) ) ) , env , out , bind_callback = bind_callback )
|
_compile ( EMap ( e . e , ELambda ( e . value . arg , ETuple ( ( e . value . arg , e . value . body ) ) . with_type ( TTuple ( ( e . value . arg . type , e . value . body . type ) ) ) ) ) . with_type ( TBag ( TTuple ( ( e . value . arg . type , e . value . body . type ) ) ) ) , env , out , bind_callback = bind_callback )
|
ADD_METHOD_CALL
|
[["Insert", ["call", 3, 49, 3, 84], ["attribute", "N0"], 0], ["Insert", ["call", 3, 49, 3, 84], ["argument_list", "N1"], 1], ["Move", "N0", ["call", 3, 49, 3, 84], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:with_type", "T"], 2], ["Insert", "N1", ["(:(", "T"], 0], ["Insert", "N1", ["call", "N2"], 1], ["Insert", "N1", ["):)", "T"], 2], ["Insert", "N2", ["identifier:TTuple", "T"], 0], ["Insert", "N2", ["argument_list", "N3"], 1], ["Insert", "N3", ["(:(", "T"], 0], ["Insert", "N3", ["tuple", "N4"], 1], ["Insert", "N3", ["):)", "T"], 2], ["Insert", "N4", ["(:(", "T"], 0], ["Insert", "N4", ["attribute", "N5"], 1], ["Insert", "N4", [",:,", "T"], 2], ["Insert", "N4", ["attribute", "N6"], 3], ["Insert", "N4", ["):)", "T"], 4], ["Insert", "N5", ["attribute", "N7"], 0], ["Insert", "N5", [".:.", "T"], 1], ["Insert", "N5", ["identifier:type", "T"], 2], ["Insert", "N6", ["attribute", "N8"], 0], ["Insert", "N6", [".:.", "T"], 1], ["Insert", "N6", ["identifier:type", "T"], 2], ["Insert", "N7", ["attribute", "N9"], 0], ["Insert", "N7", [".:.", "T"], 1], ["Insert", "N7", ["identifier:arg", "T"], 2], ["Insert", "N8", ["attribute", "N10"], 0], ["Insert", "N8", [".:.", "T"], 1], ["Insert", "N8", ["identifier:body", "T"], 2], ["Insert", "N9", ["identifier:e", "T"], 0], ["Insert", "N9", [".:.", "T"], 1], ["Insert", "N9", ["identifier:value", "T"], 2], ["Insert", "N10", ["identifier:e", "T"], 0], ["Insert", "N10", [".:.", "T"], 1], ["Insert", "N10", ["identifier:value", "T"], 2]]
|
deb-python-pyghmi
|
d022c58e61e46e84a7a367849511e49a5391f9f0
|
f223ed784998555fc4761be87d6fd025a0e7dcc3
|
pyghmi/ipmi/fru.py
|
https://github.com/openstack/deb-python-pyghmi
| true
| false
| true
|
@@ -85,7 +85,7 @@ def unpack6bitascii(inputdata):
currchar = (currchunk[0] & 0b11000000) >> 6
currchar |= (currchunk[1] & 0b1111) << 2
result += chr(0x20 + currchar)
- currchar = (currchunk[1] & 0b1111000) >> 4
+ currchar = (currchunk[1] & 0b11110000) >> 4
currchar |= (currchunk[2] & 0b11) << 4
result += chr(0x20 + currchar)
currchar = (currchunk[2] & 0b11111100) >> 2
|
currchar = ( currchunk [ 1 ] & 0b1111000 ) >> 4
|
currchar = ( currchunk [ 1 ] & 0b11110000 ) >> 4
|
CHANGE_NUMERIC_LITERAL
|
[["Update", ["integer:0b1111000", 3, 36, 3, 45], "0b11110000"]]
|
python
|
655e26acd1748b88bae6a8cd27ed3fed4caf45e8
|
40c0aef70a65db2cce2bf55d99aa88802cbe4dfa
|
src/you_get/extractors/youtube.py
|
https://github.com/danxinshang/python
| true
| false
| true
|
@@ -138,7 +138,7 @@ class YouTube(VideoExtractor):
elif video_info['status'] == ['fail']:
if video_info['errorcode'] == ['150']:
video_page = get_content('http://www.youtube.com/watch?v=%s' % self.vid)
- ytplayer_config = json.loads(re.search('ytplayer.config\s*=\s*([^\n]+});', video_page).group(1))
+ ytplayer_config = json.loads(re.search('ytplayer.config\s*=\s*([^\n]+});ytplayer', video_page).group(1))
if 'title' in ytplayer_config['args']:
# 150 Restricted from playback on certain sites
|
ytplayer_config = json . loads ( re . search ( 'ytplayer.config\s*=\s*([^\n]+});' , video_page ) . group ( 1 ) )
|
ytplayer_config = json . loads ( re . search ( 'ytplayer.config\s*=\s*([^\n]+});ytplayer' , video_page ) . group ( 1 ) )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'ytplayer.config\\s*=\\s*([^\\n]+});'", 3, 56, 3, 90], "'ytplayer.config\\s*=\\s*([^\\n]+});ytplayer'"]]
|
mu-client-pypy
|
781a6b0e24e84702be8fc7e84a04e81ce331849d
|
4e2b57e980d750117e0d81981790b4cd1201eb2b
|
pypy/module/pypyjit/test_pypy_c/test__ffi.py
|
https://github.com/wdv4758h/mu-client-pypy
| true
| false
| true
|
@@ -4,7 +4,7 @@ from pypy.module.pypyjit.test_pypy_c.test_00_model import BaseTestPyPyC
class Test__ffi(BaseTestPyPyC):
def test__ffi_call(self):
- from pypy.rlib.test.test_libffi import get_libm_name
+ from pypy.rlib.test.test_clibffi import get_libm_name
def main(libm_name):
try:
from _ffi import CDLL, types
|
from pypy . rlib . test . test_libffi import get_libm_name
|
from pypy . rlib . test . test_clibffi import get_libm_name
|
CHANGE_IDENTIFIER_USED
|
[["Update", ["identifier:test_libffi", 3, 29, 3, 40], "test_clibffi"]]
|
UIWorkshopTest
|
1ba97b96c82753464d0c52a040b49f8eefc4941c
|
2f27dfa378e01b594656116f713d83b2b8f2edda
|
setup/swc-installation-test-2.py
|
https://github.com/CamdenClark/UIWorkshopTest
| true
| false
| true
|
@@ -416,7 +416,7 @@ class CommandDependency (Dependency):
p = _subprocess.Popen(
[command] + list(self.version_options), stdin=popen_stdin,
stdout=_subprocess.PIPE, stderr=_subprocess.PIPE,
- close_fds=True, shell=False, universal_newlines=True)
+ universal_newlines=True)
except OSError as e:
raise DependencyError(
checker=self,
|
p = _subprocess . Popen ( [ command ] + list ( self . version_options ) , stdin = popen_stdin , stdout = _subprocess . PIPE , stderr = _subprocess . PIPE , close_fds = True , shell = False , universal_newlines = True )
|
p = _subprocess . Popen ( [ command ] + list ( self . version_options ) , stdin = popen_stdin , stdout = _subprocess . PIPE , stderr = _subprocess . PIPE , universal_newlines = True )
|
SAME_FUNCTION_LESS_ARGS
|
[["Delete", ["identifier:close_fds", 3, 17, 3, 26]], ["Delete", ["=:=", 3, 26, 3, 27]], ["Delete", ["true:True", 3, 27, 3, 31]], ["Delete", ["keyword_argument", 3, 17, 3, 31]], ["Delete", [",:,", 3, 31, 3, 32]], ["Delete", ["identifier:shell", 3, 33, 3, 38]], ["Delete", ["=:=", 3, 38, 3, 39]], ["Delete", ["false:False", 3, 39, 3, 44]], ["Delete", ["keyword_argument", 3, 33, 3, 44]], ["Delete", [",:,", 3, 44, 3, 45]]]
|
zulip
|
839300d781abb1135ec1d5915d55583b0fdfcf62
|
f081ee71201db31dfcac7c843663e62e2418678c
|
zerver/lib/import_realm.py
|
https://github.com/pilosus/zulip
| true
| false
| false
|
@@ -327,7 +327,7 @@ def re_map_foreign_keys_many_to_many(data: TableData,
for item in data[table]:
- old_id_list = item['field_name']
+ old_id_list = item[field_name]
new_id_list = re_map_foreign_keys_many_to_many_internal(
table, field_name, related_table, old_id_list, verbose)
item[field_name] = new_id_list
|
old_id_list = item [ 'field_name' ]
|
old_id_list = item [ field_name ]
|
SINGLE_TOKEN
|
[["Insert", ["subscript", 1, 23, 1, 41], ["identifier:field_name", "T"], 2], ["Delete", ["string:'field_name'", 1, 28, 1, 40]]]
|
xarray
|
bc956f14cfc69d5efcdbd4625d326b69645d88a3
|
72e4a908eebc763a66a0922b8385e25cb2cc01d5
|
xray/conventions.py
|
https://github.com/shoyer/xarray
| true
| false
| false
|
@@ -12,7 +12,7 @@ from .core import indexing, ops, utils
from .core.formatting import format_timestamp, first_n_items
from .core.variable import as_variable, Variable
from .core.pycompat import (iteritems, bytes_type, unicode_type, OrderedDict,
- PY3)
+ PY3, basestring)
# standard calendars recognized by netcdftime
|
from . core . pycompat import ( iteritems , bytes_type , unicode_type , OrderedDict , PY3 )
|
from . core . pycompat import ( iteritems , bytes_type , unicode_type , OrderedDict , PY3 , basestring )
|
SINGLE_STMT
|
[["Insert", ["import_from_statement", 2, 1, 3, 33], [",:,", "T"], 13], ["Insert", ["import_from_statement", 2, 1, 3, 33], ["dotted_name", "N0"], 14], ["Insert", "N0", ["identifier:basestring", "T"], 0]]
|
vestasi-update-erp
|
bf492122f8a3559431192ee15b1d586f5de68f85
|
38d0ed9f3a8a3a0ab343983b044ce8ec37d084b2
|
erpnext/stock/stock_ledger.py
|
https://github.com/suyashphadtare/vestasi-update-erp
| true
| false
| true
|
@@ -269,7 +269,7 @@ def get_moving_average_values(qty_after_transaction, sle, valuation_rate):
elif not valuation_rate:
valuation_rate = get_valuation_rate(sle.item_code, sle.warehouse)
- return abs(valuation_rate)
+ return abs(flt(valuation_rate))
def get_fifo_values(qty_after_transaction, sle, stock_queue):
incoming_rate = flt(sle.incoming_rate)
|
return abs ( valuation_rate )
|
return abs ( flt ( valuation_rate ) )
|
ADD_FUNCTION_AROUND_EXPRESSION
|
[["Insert", ["argument_list", 3, 12, 3, 28], ["(:(", "T"], 0], ["Insert", ["argument_list", 3, 12, 3, 28], ["call", "N0"], 1], ["Insert", ["argument_list", 3, 12, 3, 28], ["):)", "T"], 2], ["Insert", "N0", ["identifier:flt", "T"], 0], ["Move", "N0", ["argument_list", 3, 12, 3, 28], 1]]
|
crowdbuttons
|
9ae705d6ff59e0abebd50da173016f94e6299d3c
|
2e87e27d2ba3bf71425f7e02c4c8fbe93af9d799
|
crowdapp/views.py
|
https://github.com/janetyc/crowdbuttons
| true
| false
| true
|
@@ -229,7 +229,7 @@ def get_guide(question_id):
guide_list.append("0")
guide_str = ",".join(guide_list)
- ans_str = str(staus_map[status])
+ ans_str = str(status_map[status])
return "%s:%s" % (guide_str, ans_str)
#return jsonify(success=1, data=data)
|
ans_str = str ( staus_map [ status ] )
|
ans_str = str ( status_map [ status ] )
|
CHANGE_IDENTIFIER_USED
|
[["Update", ["identifier:staus_map", 3, 19, 3, 28], "status_map"]]
|
keras-example
|
48ae7217e482a1a3624d6e5380c972a653cacfaf
|
6f54b233f101323c55fc1d34696938713c2679b2
|
keras/backend/tensorflow_backend.py
|
https://github.com/yuayi521/keras-example
| true
| false
| false
|
@@ -1136,7 +1136,7 @@ def rnn(step_function, inputs, initial_states,
if mask is not None:
if go_backwards:
- mask = tf.reverse(mask, [True] + [False] * (ndim - 1))
+ mask = tf.reverse(mask, [True] + [False] * (ndim - 2))
# Transpose not supported by bool tensor types, hence round-trip to uint8.
mask = tf.cast(mask, tf.uint8)
|
mask = tf . reverse ( mask , [ True ] + [ False ] * ( ndim - 1 ) )
|
mask = tf . reverse ( mask , [ True ] + [ False ] * ( ndim - 2 ) )
|
CHANGE_NUMERIC_LITERAL
|
[["Update", ["integer:1", 3, 68, 3, 69], "2"]]
|
sherpa
|
de979518aa31a770c5b35664345836271904ba82
|
c6a59b8f78e126cdfb8d2b6dad252dadf1209e46
|
sherpa/io.py
|
https://github.com/DougBurke/sherpa
| true
| false
| false
|
@@ -26,7 +26,7 @@ from exceptions import ValueError
import os
-_all__ = ('read_data', 'write_data', 'get_ascii_data', 'read_arrays',
+__all__ = ('read_data', 'write_data', 'get_ascii_data', 'read_arrays',
'write_arrays')
|
_all__ = ( 'read_data' , 'write_data' , 'get_ascii_data' , 'read_arrays' , 'write_arrays' )
|
__all__ = ( 'read_data' , 'write_data' , 'get_ascii_data' , 'read_arrays' , 'write_arrays' )
|
CHANGE_IDENTIFIER_USED
|
[["Update", ["identifier:_all__", 3, 1, 3, 7], "__all__"]]
|
wxBot
|
6f889b0c0bfe51d22fc196379b47019940649bcf
|
9628358c36dae7cd0bf7178b22db200b8a32c107
|
bot.py
|
https://github.com/awesome-python/wxBot
| true
| false
| true
|
@@ -59,7 +59,7 @@ class TulingWXBot(WXBot):
if not self.robot_switch:
return
if msg['msg_type_id'] == 1 and msg['content']['type'] == 0: # reply to self
- self.button(msg)
+ self.auto_switch(msg)
elif msg['msg_type_id'] == 4 and msg['content']['type'] == 0: # text message from contact
self.send_msg_by_uid(self.tuling_auto_reply(msg['user']['id'], msg['content']['data']), msg['user']['id'])
elif msg['msg_type_id'] == 3: # group message
|
self . button ( msg )
|
self . auto_switch ( msg )
|
WRONG_FUNCTION_NAME
|
[["Update", ["identifier:button", 3, 18, 3, 24], "auto_switch"]]
|
deis
|
f9f60b3df7b078d2b23c97551853141200f7991d
|
887732e92e2641f6b6794f3a0b29fbd221e5efb4
|
docs/conf.py
|
https://github.com/robszumski/deis
| true
| false
| false
|
@@ -21,7 +21,7 @@ import sys
sys.path.insert(0, os.path.abspath('..'))
# create local_settings.py for SECRET_KEY if necessary
local_settings_path = os.path.abspath(
- os.path.join('..', 'deis', 'local_settings.py'))
+ os.path.join('..', 'controller', 'deis', 'local_settings.py'))
if not os.path.exists(local_settings_path):
with open(local_settings_path, 'w') as local_settings:
local_settings.write("SECRET_KEY = 'DummySecretKey'\n")
|
local_settings_path = os . path . abspath ( os . path . join ( '..' , 'deis' , 'local_settings.py' ) )
|
local_settings_path = os . path . abspath ( os . path . join ( '..' , 'controller' , 'deis' , 'local_settings.py' ) )
|
SAME_FUNCTION_MORE_ARGS
|
[["Move", ["string:'deis'", 3, 24, 3, 30], ["argument_list", 3, 17, 3, 52], 4], ["Insert", ["argument_list", 3, 17, 3, 52], ["string:'controller'", "T"], 3], ["Insert", ["argument_list", 3, 17, 3, 52], [",:,", "T"], 5]]
|
allianceauth
|
e6532025f8818aaa8326a15eb5a36263d582657d
|
3361d36bbf4dc00552878646e326edb7177327ed
|
eveonline/providers.py
|
https://github.com/Kaezon/allianceauth
| true
| false
| true
|
@@ -228,7 +228,7 @@ class EveProvider(object):
@python_2_unicode_compatible
class EveSwaggerProvider(EveProvider):
def __init__(self, token=None, adapter=None):
- self.client = esi_client_factory(token=token, Alliance='v1', Character='v4', Corporation='v2', Universe='v2')
+ self.client = esi_client_factory(token=token, Alliance='v3', Character='v4', Corporation='v2', Universe='v2')
self.adapter = adapter or self
def __str__(self):
|
self . client = esi_client_factory ( token = token , Alliance = 'v1' , Character = 'v4' , Corporation = 'v2' , Universe = 'v2' )
|
self . client = esi_client_factory ( token = token , Alliance = 'v3' , Character = 'v4' , Corporation = 'v2' , Universe = 'v2' )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'v1'", 3, 64, 3, 68], "'v3'"]]
|
hiplexpipe
|
92e7eecea46d560df6287e9f13b2856f8f28a5e8
|
e81527af3f0053d006ef95800f8a87dab0341a42
|
src/pipeline.py
|
https://github.com/jasteen/hiplexpipe
| true
| false
| true
|
@@ -222,6 +222,6 @@ def make_pipeline(state):
task_func=stages.apply_multicov,
name='apply_multicov',
input=output_from('sort_bam_picard'),
- output='coverage/all.coverage.txt')
+ output='coverage/all.multicov.txt')
return pipeline
|
output = 'coverage/all.coverage.txt'
|
output = 'coverage/all.multicov.txt'
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'coverage/all.coverage.txt'", 3, 16, 3, 43], "'coverage/all.multicov.txt'"]]
|
breezy
|
aa5bfe9261575ba0f1a389ae176b9e364c62bad1
|
46317f863d5885df50da6e4ff8b0fac372b247c6
|
bzrlib/transport/gio/__init__.py
|
https://github.com/jelmer/breezy
| true
| false
| true
|
@@ -566,7 +566,7 @@ class GioTransport(ConnectedTransport):
raise errors.PathError(path, extra=extra)
else:
mutter('unable to understand error for path: %s: %s', path, err)
- raise err
+ raise errors.PathError(path, extra="Unhandled gio error: " + str(err))
def get_test_permutations():
"""Return the permutations to be used in testing."""
|
raise err
|
raise errors . PathError ( path , extra = "Unhandled gio error: " + str ( err ) )
|
SINGLE_STMT
|
[["Insert", ["raise_statement", 3, 13, 3, 22], ["call", "N0"], 1], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Insert", "N1", ["identifier:errors", "T"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:PathError", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["identifier:path", "T"], 1], ["Insert", "N2", [",:,", "T"], 2], ["Insert", "N2", ["keyword_argument", "N3"], 3], ["Insert", "N2", ["):)", "T"], 4], ["Insert", "N3", ["identifier:extra", "T"], 0], ["Insert", "N3", ["=:=", "T"], 1], ["Insert", "N3", ["binary_operator", "N4"], 2], ["Insert", "N4", ["string:\"Unhandled gio error: \"", "T"], 0], ["Insert", "N4", ["+:+", "T"], 1], ["Insert", "N4", ["call", "N5"], 2], ["Insert", "N5", ["identifier:str", "T"], 0], ["Insert", "N5", ["argument_list", "N6"], 1], ["Insert", "N6", ["(:(", "T"], 0], ["Move", "N6", ["identifier:err", 3, 19, 3, 22], 1], ["Insert", "N6", ["):)", "T"], 2]]
|
python-acoustics
|
6a9157d52f016bec71f9c807eb914a728a7fe495
|
b71330f8b18fdcd93d409c48bf6ea14504484f42
|
acoustics/bands.py
|
https://github.com/FRidh/python-acoustics
| true
| false
| true
|
@@ -121,7 +121,7 @@ def third2oct(levels, axis=None):
shape[axis] = shape[axis] // 3
shape.insert(axis+1, 3)
levels = np.reshape(levels, shape)
- return dbsum(levels, axis=axis+1)
+ return np.squeeze(dbsum(levels, axis=axis+1))
def _check_band_type(freqs):
|
return dbsum ( levels , axis = axis + 1 )
|
return np . squeeze ( dbsum ( levels , axis = axis + 1 ) )
|
ADD_FUNCTION_AROUND_EXPRESSION
|
[["Insert", ["call", 3, 12, 3, 38], ["attribute", "N0"], 0], ["Insert", ["call", 3, 12, 3, 38], ["argument_list", "N1"], 1], ["Insert", "N0", ["identifier:np", "T"], 0], ["Insert", "N0", [".:.", "T"], 1], ["Insert", "N0", ["identifier:squeeze", "T"], 2], ["Insert", "N1", ["(:(", "T"], 0], ["Move", "N1", ["call", 3, 12, 3, 38], 1], ["Insert", "N1", ["):)", "T"], 2]]
|
breezy
|
abcda53177fc72f411887eb9699f09cba4c42cad
|
fa8d72220e67ad2e1ff93f8cc5970d00f3628ad6
|
tools/generate_release_notes.py
|
https://github.com/breezy-team/breezy
| true
| false
| true
|
@@ -86,7 +86,7 @@ def output_news_file_sphinx(out_file, news_file_name):
def output_news_file_plain(out_file, news_file_name):
- with open(news_file_name, 'rb') as f:
+ with open(news_file_name, 'r') as f:
lines = f.readlines()
title = os.path.basename(news_file_name)[len('brz-'):-len('.txt')]
for line in lines:
|
with open ( news_file_name , 'rb' ) as f : lines = f . readlines ( )
|
with open ( news_file_name , 'r' ) as f : lines = f . readlines ( )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'rb'", 3, 31, 3, 35], "'r'"]]
|
badger-sett
|
13c252d050014145b7e95820067922def4e0876b
|
1ec0c1fe8ca5d6c3246413a710bf5a24726934cd
|
crawler.py
|
https://github.com/funilrys/badger-sett
| true
| false
| false
|
@@ -41,7 +41,7 @@ ap.add_argument('--browser', choices=[FIREFOX, CHROME], default=FIREFOX,
help='Browser to use for the scan')
ap.add_argument('--n-sites', type=int, default=2000,
help='Number of websites to visit on the crawl')
-ap.add_argument('--timeout', type=float, default=10,
+ap.add_argument('--timeout', type=float, default=30,
help='Amount of time to allow each site to load, in seconds')
ap.add_argument('--wait-time', type=float, default=5,
help='Amount of time to wait on each site after it loads, in seconds')
|
ap . add_argument ( '--timeout' , type = float , default = 10 , help = 'Amount of time to allow each site to load, in seconds' )
|
ap . add_argument ( '--timeout' , type = float , default = 30 , help = 'Amount of time to allow each site to load, in seconds' )
|
CHANGE_NUMERIC_LITERAL
|
[["Update", ["integer:10", 3, 50, 3, 52], "30"]]
|
xunlei-lixian
|
73a08c0e50ce1915382954f949653a001b4ccbc4
|
2a7aea72599fd525ffa75c1838a9a50b3a0e5c0c
|
lixian.py
|
https://github.com/sndnvaps/xunlei-lixian
| true
| false
| false
|
@@ -39,7 +39,7 @@ class Logger:
logger = Logger()
class XunleiClient:
- page_size = 9999
+ page_size = 100
bt_page_size = 9999
def __init__(self, username=None, password=None, cookie_path=None, login=True):
self.username = username
|
page_size = 9999
|
page_size = 100
|
CHANGE_NUMERIC_LITERAL
|
[["Update", ["integer:9999", 3, 14, 3, 18], "100"]]
|
vialectrum
|
0511d7e76f6be38425ff13c2c74b8792e4bfffa4
|
2c38e85a5032e82fd666ecb00eab4d7d5193982e
|
lib/wallet.py
|
https://github.com/vialectrum/vialectrum
| true
| false
| true
|
@@ -1258,7 +1258,7 @@ class Deterministic_Wallet(Abstract_Wallet):
return True
def get_action(self):
- if not self.get_master_public_keys():
+ if not self.get_master_public_key():
return 'create_seed'
if not self.accounts:
return 'create_accounts'
|
if not self . get_master_public_keys ( ) : return 'create_seed'
|
if not self . get_master_public_key ( ) : return 'create_seed'
|
WRONG_FUNCTION_NAME
|
[["Update", ["identifier:get_master_public_keys", 3, 21, 3, 43], "get_master_public_key"]]
|
anaconda
|
6000f812565e59281b1c3fc6634b50f2d6e3c7a9
|
8a06a692cfa03835010442fae9807749463ce461
|
autopart.py
|
https://github.com/projectatomic/anaconda
| true
| false
| true
|
@@ -1362,7 +1362,7 @@ def doAutoPartition(dir, diskset, partitions, intf, instClass, dispatch):
extra = _("\n\nPress 'OK' to reboot your system.")
else:
extra = _("\n\nYou can choose a different automatic partitioning "
- "options or click 'Back' to select manual partitioning."
+ "option, or click 'Back' to select manual partitioning."
"\n\nPress 'OK' to continue.")
intf.messageWindow(_("Automatic Partitioning Errors"),
|
else : extra = _ ( "\n\nYou can choose a different automatic partitioning " "options or click 'Back' to select manual partitioning." "\n\nPress 'OK' to continue." )
|
else : extra = _ ( "\n\nYou can choose a different automatic partitioning " "option, or click 'Back' to select manual partitioning." "\n\nPress 'OK' to continue." )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:\"options or click 'Back' to select manual partitioning.\"", 3, 23, 3, 79], "\"option, or click 'Back' to select manual partitioning.\""]]
|
sopel
|
47d16e9a72029217c297c39f57a4602c1e67057c
|
7eabed9fc5c04789f09528010bba73713574d15b
|
sopel/loader.py
|
https://github.com/calzoneman/sopel
| true
| false
| true
|
@@ -147,7 +147,7 @@ def clean_callable(func, config):
doc = trim_docstring(func.__doc__)
example = None
- func.unblockable = getattr(func, 'unblockable', True)
+ func.unblockable = getattr(func, 'unblockable', False)
func.priority = getattr(func, 'priority', 'medium')
func.thread = getattr(func, 'thread', True)
func.rate = getattr(func, 'rate', 0)
|
func . unblockable = getattr ( func , 'unblockable' , True )
|
func . unblockable = getattr ( func , 'unblockable' , False )
|
CHANGE_BOOLEAN_LITERAL
|
[["Insert", ["argument_list", 3, 31, 3, 58], ["false:False", "T"], 5], ["Delete", ["true:True", 3, 53, 3, 57]]]
|
twarc
|
13b263051264c0821bf2d8b3963c06a900b2839e
|
7716ca031ad85870ca91963a756f6c62af2d221a
|
utils/wordcloud.py
|
https://github.com/gwu-libraries/twarc
| true
| false
| true
|
@@ -39,7 +39,7 @@ def main():
top_words = sorted_words[0:MAX_WORDS]
words = []
- count_range = word_counts[top_words[0]] - word_counts[top_words[-1]]
+ count_range = word_counts[top_words[0]] - word_counts[top_words[-1]] + 1
size_ratio = 100.0 / count_range
for word in top_words:
size = int(word_counts[word] * size_ratio) + 15
|
count_range = word_counts [ top_words [ 0 ] ] - word_counts [ top_words [ - 1 ] ]
|
count_range = word_counts [ top_words [ 0 ] ] - word_counts [ top_words [ - 1 ] ] + 1
|
SINGLE_STMT
|
[["Move", ["binary_operator", 3, 16, 3, 70], ["binary_operator", 3, 16, 3, 70], 0], ["Insert", ["binary_operator", 3, 16, 3, 70], ["+:+", "T"], 1], ["Insert", ["binary_operator", 3, 16, 3, 70], ["integer:1", "T"], 2]]
|
mycroft-core
|
0b8e88a325db8a8fcf0e95f78546939fed68a8de
|
9ff7fd5452a8db71073847b500ba30db1cf99a21
|
mycroft/util/lang/parse_en.py
|
https://github.com/forslund/mycroft-core
| true
| false
| true
|
@@ -178,7 +178,7 @@ def _initialize_number_data(short_scale):
short_scale boolean:
Returns:
- (dict, dict, dict)
+ (set(str), dict(str, number), dict(str, number))
multiplies, string_num_ordinal, string_num_scale
|
short_scale boolean : Returns : ( dict , dict , dict )
|
short_scale boolean : Returns : ( set ( str ) , dict ( str , number ) , dict ( str , number ) )
|
SINGLE_STMT
|
[["Insert", ["tuple", 3, 9, 3, 27], ["call", "N0"], 1], ["Insert", ["tuple", 3, 9, 3, 27], ["call", "N1"], 4], ["Insert", ["tuple", 3, 9, 3, 27], [",:,", "T"], 5], ["Insert", ["tuple", 3, 9, 3, 27], ["call", "N2"], 6], ["Insert", ["tuple", 3, 9, 3, 27], ["):)", "T"], 7], ["Update", ["identifier:dict", 3, 10, 3, 14], "set"], ["Move", "N0", ["identifier:dict", 3, 10, 3, 14], 0], ["Insert", "N0", ["argument_list", "N3"], 1], ["Move", "N1", ["identifier:dict", 3, 22, 3, 26], 0], ["Insert", "N1", ["argument_list", "N4"], 1], ["Insert", "N2", ["identifier:dict", "T"], 0], ["Insert", "N2", ["argument_list", "N5"], 1], ["Insert", "N3", ["(:(", "T"], 0], ["Insert", "N3", ["identifier:str", "T"], 1], ["Move", "N3", ["):)", 3, 26, 3, 27], 2], ["Insert", "N4", ["(:(", "T"], 0], ["Insert", "N4", ["identifier:str", "T"], 1], ["Insert", "N4", [",:,", "T"], 2], ["Insert", "N4", ["identifier:number", "T"], 3], ["Insert", "N4", ["):)", "T"], 4], ["Insert", "N5", ["(:(", "T"], 0], ["Insert", "N5", ["identifier:str", "T"], 1], ["Insert", "N5", [",:,", "T"], 2], ["Insert", "N5", ["identifier:number", "T"], 3], ["Insert", "N5", ["):)", "T"], 4], ["Delete", [",:,", 3, 14, 3, 15]], ["Delete", ["identifier:dict", 3, 16, 3, 20]]]
|
cmgtools-lite
|
63954c5f19bbbb425a81cfdc358afd7d761ccf20
|
dda598a74954571076c7e0a1b686d07b8b04004f
|
TTHAnalysis/cfg/run_susyMultilepton_cfg.py
|
https://github.com/stiegerb/cmgtools-lite
| true
| false
| false
|
@@ -90,7 +90,7 @@ if analysis in ['SOS']:
# otherwise with only absIso cut at 10 GeV and no relIso we risk cleaning away good jets
if isolation == "miniIso":
- if analysis=="ttH":
+ if (analysis=="ttH") or (analysis =="SOS"):
lepAna.loose_muon_isoCut = lambda muon : muon.miniRelIso < 0.4 and muon.sip3D() < 8
lepAna.loose_electron_isoCut = lambda elec : elec.miniRelIso < 0.4 and elec.sip3D() < 8
elif analysis=="susy":
|
if analysis == "ttH" : lepAna . loose_muon_isoCut = lambda muon : muon . miniRelIso < 0.4 and muon . sip3D ( ) < 8 lepAna . loose_electron_isoCut = lambda elec : elec . miniRelIso < 0.4 and elec . sip3D ( ) < 8 elif analysis == "susy" :
|
if ( analysis == "ttH" ) or ( analysis == "SOS" ) : lepAna . loose_muon_isoCut = lambda muon : muon . miniRelIso < 0.4 and muon . sip3D ( ) < 8 lepAna . loose_electron_isoCut = lambda elec : elec . miniRelIso < 0.4 and elec . sip3D ( ) < 8 elif analysis == "susy" :
|
LESS_SPECIFIC_IF
|
[["Insert", ["if_statement", 3, 5, 6, 27], ["boolean_operator", "N0"], 1], ["Insert", "N0", ["parenthesized_expression", "N1"], 0], ["Insert", "N0", ["or:or", "T"], 1], ["Insert", "N0", ["parenthesized_expression", "N2"], 2], ["Insert", "N1", ["(:(", "T"], 0], ["Move", "N1", ["comparison_operator", 3, 8, 3, 23], 1], ["Insert", "N1", ["):)", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["comparison_operator", "N3"], 1], ["Insert", "N2", ["):)", "T"], 2], ["Insert", "N3", ["identifier:analysis", "T"], 0], ["Insert", "N3", ["==:==", "T"], 1], ["Insert", "N3", ["string:\"SOS\"", "T"], 2]]
|
pychron
|
6a9ca2821212195565245434cfaf46070024b4d7
|
67051b34fe89501bddd881ec3d07ce4c8fe2d2c0
|
pychron/updater/packager.py
|
https://github.com/UManPychron/pychron
| true
| false
| true
|
@@ -111,7 +111,7 @@ def resource_path(dest, name):
def copy_resource_dir(dest, src, name=None):
if os.path.exists(src):
if name is None:
- name = os.path.basename(dest, src)
+ name = os.path.basename(src)
shutil.copytree(src, resource_path(dest, name))
else:
print '++++++++++++++++++++++ Not a valid Resource {} +++++++++++++++++++++++'.format(src)
|
name = os . path . basename ( dest , src )
|
name = os . path . basename ( src )
|
SAME_FUNCTION_LESS_ARGS
|
[["Delete", ["identifier:dest", 3, 37, 3, 41]], ["Delete", [",:,", 3, 41, 3, 42]]]
|
exscript
|
7fde320497dcda9ee77c62c569a5ecd6226bb488
|
becfa1cb7f7404f60b5ee4509ab7e385960401fe
|
src/Exscript/util/interact.py
|
https://github.com/knipknap/exscript
| true
| false
| false
|
@@ -180,7 +180,7 @@ def prompt(key,
if history is None:
history = InputHistory()
- default = history.get(key, default)
+ default = history.get(key, str(default))
while True:
if default is None:
value = raw_input('%s: ' % message)
|
default = history . get ( key , default )
|
default = history . get ( key , str ( default ) )
|
ADD_FUNCTION_AROUND_EXPRESSION
|
[["Insert", ["argument_list", 2, 26, 2, 40], ["call", "N0"], 3], ["Insert", ["argument_list", 2, 26, 2, 40], ["):)", "T"], 4], ["Insert", "N0", ["identifier:str", "T"], 0], ["Insert", "N0", ["argument_list", "N1"], 1], ["Insert", "N1", ["(:(", "T"], 0], ["Move", "N1", ["identifier:default", 2, 32, 2, 39], 1], ["Move", "N1", ["):)", 2, 39, 2, 40], 2]]
|
awx
|
54f6426c0c4634417d3955fe09827af276aa1f3a
|
0fa5ba606bc5731fb8b8d97223fe5921bc4639af
|
awx/main/management/commands/inventory_import.py
|
https://github.com/cchurch/awx
| true
| false
| true
|
@@ -547,7 +547,7 @@ class Command(BaseCommand):
# for each host in a mem group, add it to the parents to which it belongs
# FIXME: where it does not already exist
for (k,v) in group_names.iteritems():
- LOGGER.info("adding parent arrangements for %s k")
+ LOGGER.info("adding parent arrangements for %s" % k)
db_group = Group.objects.get(name=k, inventory__pk=inventory.pk)
mem_hosts = v.hosts
for h in mem_hosts:
|
LOGGER . info ( "adding parent arrangements for %s k" )
|
LOGGER . info ( "adding parent arrangements for %s" % k )
|
SINGLE_STMT
|
[["Insert", ["argument_list", 3, 24, 3, 63], ["binary_operator", "N0"], 1], ["Update", ["string:\"adding parent arrangements for %s k\"", 3, 25, 3, 62], "\"adding parent arrangements for %s\""], ["Move", "N0", ["string:\"adding parent arrangements for %s k\"", 3, 25, 3, 62], 0], ["Insert", "N0", ["%:%", "T"], 1], ["Insert", "N0", ["identifier:k", "T"], 2]]
|
Sick-Beard-TPB
|
22c56d3ffa36103f3253651e82f5a830024d60d1
|
fcf42713a141a3319e959e8904a1b9ba83d8731e
|
sickbeard/processTV.py
|
https://github.com/yanicklandry/Sick-Beard-TPB
| true
| false
| true
|
@@ -601,7 +601,7 @@ def processFile(fileName, downloadDir=None, nzbName=None):
script_cmd = shlex.split(curScriptName) + [rootEp.location, biggestFileName, str(tvdb_id), str(season), str(episode), str(rootEp.airdate)]
returnStr += logHelper("Executing command "+str(script_cmd))
try:
- p = subprocess.Popen(script_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ p = subprocess.Popen(script_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=sickbeard.PROG_DIR)
out, err = p.communicate()
returnStr += logHelper("Script result: "+str(out), logger.DEBUG)
except OSError, e:
|
p = subprocess . Popen ( script_cmd , stdout = subprocess . PIPE , stderr = subprocess . STDOUT )
|
p = subprocess . Popen ( script_cmd , stdout = subprocess . PIPE , stderr = subprocess . STDOUT , cwd = sickbeard . PROG_DIR )
|
SAME_FUNCTION_MORE_ARGS
|
[["Insert", ["argument_list", 3, 33, 3, 95], [",:,", "T"], 6], ["Insert", ["argument_list", 3, 33, 3, 95], ["keyword_argument", "N0"], 7], ["Insert", "N0", ["identifier:cwd", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["attribute", "N1"], 2], ["Insert", "N1", ["identifier:sickbeard", "T"], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:PROG_DIR", "T"], 2]]
|
ryu
|
42a097c596e7ccb35de7762b165c939999ddd0ce
|
c519c7d86f015e924e68d23af9d58449fba404d1
|
ryu/ofproto/ofproto_v1_3.py
|
https://github.com/jkoelker/ryu
| true
| false
| false
|
@@ -685,7 +685,7 @@ OFP_BUCKET_COUNTER_SIZE = 16
assert calcsize(OFP_BUCKET_COUNTER_PACK_STR) == OFP_BUCKET_COUNTER_SIZE
# struct ofp_group_desc_stats
-OFP_GROUP_DESC_STATS_PACK_STR = '!HBBI'
+OFP_GROUP_DESC_STATS_PACK_STR = '!HBxI'
OFP_GROUP_DESC_STATS_SIZE = 8
assert calcsize(OFP_GROUP_DESC_STATS_PACK_STR) == OFP_GROUP_DESC_STATS_SIZE
|
OFP_GROUP_DESC_STATS_PACK_STR = '!HBBI'
|
OFP_GROUP_DESC_STATS_PACK_STR = '!HBxI'
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'!HBBI'", 3, 33, 3, 40], "'!HBxI'"]]
|
mongodb-backup-system
|
907f444d2fb4b163614b466e2c5914db8648cc19
|
2f1f3906c966fcf7d66bc677b82a8f5e59a410c6
|
mbs/errors.py
|
https://github.com/mongolab/mongodb-backup-system
| true
| false
| false
|
@@ -204,7 +204,7 @@ class BadCollectionNameError(DumpError):
"drop these collection(s)")
###############################################################################
-class InvalidBSONObjSizeError(RetriableDumpError):
+class InvalidBSONObjSizeError(DumpError):
pass
###############################################################################
|
class InvalidBSONObjSizeError ( RetriableDumpError ) : pass
|
class InvalidBSONObjSizeError ( DumpError ) : pass
|
CHANGE_IDENTIFIER_USED
|
[["Update", ["identifier:RetriableDumpError", 3, 31, 3, 49], "DumpError"]]
|
salt
|
37bc3ad8fd1993b2a400523dcd3475384549e9f1
|
5b23b91ac60924d249fe1869ad942849171ce372
|
tests/integration/modules/gem.py
|
https://github.com/E-LLP/salt
| true
| false
| true
|
@@ -67,7 +67,7 @@ class GemModuleTest(integration.ModuleCase):
ret = self.run_function('gem.list_upgrades')
self.assertIn('thor', ret)
- self.run_function('gem.uninstalled', [OLD_GEM])
+ self.run_function('gem.uninstall', [OLD_GEM])
def test_sources_add_remove(self):
|
self . run_function ( 'gem.uninstalled' , [ OLD_GEM ] )
|
self . run_function ( 'gem.uninstall' , [ OLD_GEM ] )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'gem.uninstalled'", 3, 27, 3, 44], "'gem.uninstall'"]]
|
iris
|
0232f31ec54af06648504affd56cf8a404700e68
|
d8335ced9b37f7fed784139d073ba3d7c6711e7e
|
lib/iris/fileformats/netcdf.py
|
https://github.com/bjlittle/iris
| true
| false
| false
|
@@ -714,7 +714,7 @@ class Saver(object):
using `numpy.around(scale*data)/scale`, where `scale = 2**bits`,
and `bits` is determined so that a precision of 0.1 is retained (in
this case `bits=4`). From
- http://www.cdc.noaa.gov/cdc/conventions/cdc_netcdf_standard.shtml:
+ http://www.esrl.noaa.gov/psd/data/gridded/conventions/cdc_netcdf_standard.shtml:
"least_significant_digit -- power of ten of the smallest decimal
place in unpacked data that is a reliable value". Default is
`None`, or no quantization, or 'lossless' compression.
|
http : // www . cdc . noaa . gov / cdc / conventions / cdc_netcdf_standard . shtml : " least_significant_digit - - power of ten of the smallest decimal
|
http : // www . esrl . noaa . gov / psd / data / gridded / conventions / cdc_netcdf_standard . shtml : " least_significant_digit - - power of ten of the smallest decimal
|
CHANGE_BINARY_OPERAND
|
[["Insert", ["binary_operator", 3, 20, 3, 40], ["binary_operator", "N0"], 0], ["Insert", ["binary_operator", 3, 20, 3, 40], ["/:/", "T"], 1], ["Insert", ["binary_operator", 3, 20, 3, 40], ["identifier:gridded", "T"], 2], ["Insert", "N0", ["binary_operator", "N1"], 0], ["Insert", "N0", ["/:/", "T"], 1], ["Insert", "N0", ["identifier:data", "T"], 2], ["Move", "N1", ["attribute", 3, 20, 3, 36], 0], ["Move", "N1", ["/:/", 3, 36, 3, 37], 1], ["Update", ["identifier:cdc", 3, 37, 3, 40], "psd"], ["Move", "N1", ["identifier:cdc", 3, 37, 3, 40], 2], ["Update", ["identifier:cdc", 3, 24, 3, 27], "esrl"]]
|
py.netfs
|
011147a9898ada5dbf509f7ce3b4b7ca78ceb25d
|
75971d26275c2691eecaeb0f918416435f0903d0
|
score/netfs/cli.py
|
https://github.com/score-framework/py.netfs
| true
| false
| true
|
@@ -100,7 +100,7 @@ def proxy(host, port, backend, logconf=None):
@click.argument('file', type=click.File(mode='wb'))
def download(host, port, path, file, logconf=None):
init_logging(logconf)
- conf = netfs.init({'host': host, 'port': port, 'cachedir': '.'})
+ conf = netfs.init({'server': '{}:{}'.format(host, port), 'cachedir': '.'})
conf.connect().download(path, file)
|
conf = netfs . init ( { 'host' : host , 'port' : port , 'cachedir' : '.' } )
|
conf = netfs . init ( { 'server' : '{}:{}' . format ( host , port ) , 'cachedir' : '.' } )
|
SINGLE_STMT
|
[["Insert", ["dictionary", 3, 23, 3, 68], ["pair", "N0"], 1], ["Update", ["string:'host'", 3, 24, 3, 30], "'server'"], ["Move", "N0", ["string:'host'", 3, 24, 3, 30], 0], ["Move", "N0", [":::", 3, 30, 3, 31], 1], ["Insert", "N0", ["call", "N1"], 2], ["Insert", "N1", ["attribute", "N2"], 0], ["Insert", "N1", ["argument_list", "N3"], 1], ["Insert", "N2", ["string:'{}:{}'", "T"], 0], ["Insert", "N2", [".:.", "T"], 1], ["Insert", "N2", ["identifier:format", "T"], 2], ["Insert", "N3", ["(:(", "T"], 0], ["Insert", "N3", ["identifier:host", "T"], 1], ["Insert", "N3", [",:,", "T"], 2], ["Move", "N3", ["identifier:port", 3, 46, 3, 50], 3], ["Insert", "N3", ["):)", "T"], 4], ["Delete", ["identifier:host", 3, 32, 3, 36]], ["Delete", ["pair", 3, 24, 3, 36]], ["Delete", [",:,", 3, 36, 3, 37]], ["Delete", ["string:'port'", 3, 38, 3, 44]], ["Delete", [":::", 3, 44, 3, 45]], ["Delete", ["pair", 3, 38, 3, 50]]]
|
SDNRacer
|
c6584d48e8cedc2ede860593d7308af5e9dc6a71
|
2a7c9bc92390973090094da3d822cf63d489c314
|
sts/entities.py
|
https://github.com/nsg-ethz/SDNRacer
| true
| false
| true
|
@@ -679,7 +679,7 @@ class BigSwitchController(Controller):
if self.config.restart_cmd == "":
raise RuntimeError("No command found to restart controller %s!" % self.label)
self.log.info("Relaunching controller %s: %s" % (self.label, " ".join(self.config.expanded_restart_cmd)))
- self.process = popen_filtered("[%s]" % self.label, self.config.expanded_start_cmd, self.config.cwd)
+ self.process = popen_filtered("[%s]" % self.label, self.config.expanded_restart_cmd, self.config.cwd)
self.alive = True
def check_status(self, simulation):
|
self . process = popen_filtered ( "[%s]" % self . label , self . config . expanded_start_cmd , self . config . cwd )
|
self . process = popen_filtered ( "[%s]" % self . label , self . config . expanded_restart_cmd , self . config . cwd )
|
CHANGE_ATTRIBUTE_USED
|
[["Update", ["identifier:expanded_start_cmd", 3, 70, 3, 88], "expanded_restart_cmd"]]
|
brothercodes
|
1663623fadf80dc64a6f89826decfa2cd04a158a
|
b58c8205584640ca746e9cad242e6735c665c981
|
django/core/handlers/base.py
|
https://github.com/anilrzk/brothercodes
| true
| false
| true
|
@@ -20,7 +20,7 @@ class BaseHandler:
try:
dot = middleware_path.rindex('.')
except ValueError:
- raise exceptions.ImproperlyConfigured, '%s isn\'t look like a middleware module' % middleware_path
+ raise exceptions.ImproperlyConfigured, '%s isn\'t a middleware module' % middleware_path
mw_module, mw_classname = middleware_path[:dot], middleware_path[dot+1:]
try:
mod = __import__(mw_module, '', '', [''])
|
raise exceptions . ImproperlyConfigured , '%s isn\'t look like a middleware module' % middleware_path
|
raise exceptions . ImproperlyConfigured , '%s isn\'t a middleware module' % middleware_path
|
CHANGE_BINARY_OPERAND
|
[["Update", ["string:'%s isn\\'t look like a middleware module'", 3, 56, 3, 97], "'%s isn\\'t a middleware module'"]]
|
evennia
|
609c784b140be1303194c113cb009fbfa9bbfce1
|
79f5a4a93ade2487f8bdc47fddf9e2d6ad7f7063
|
evennia/commands/default/player.py
|
https://github.com/evennia/evennia
| true
| false
| true
|
@@ -446,7 +446,7 @@ class CmdOption(MuxPlayerCommand):
row.append("%s%s" % (saved, changed))
table.add_row(*row)
- self.msg("|wClient settings (%s):|n\n%s|n" % (self.session.protocol_key, table))
+ self.msg("{wClient settings (%s):|n\n%s|n" % (self.session.protocol_key, table))
return
|
self . msg ( "|wClient settings (%s):|n\n%s|n" % ( self . session . protocol_key , table ) )
|
self . msg ( "{wClient settings (%s):|n\n%s|n" % ( self . session . protocol_key , table ) )
|
CHANGE_BINARY_OPERAND
|
[["Update", ["string:\"|wClient settings (%s):|n\\n%s|n\"", 3, 22, 3, 55], "\"{wClient settings (%s):|n\\n%s|n\""]]
|
submin
|
b0ed9c380ce6133b536763b5658bb635425130fd
|
2f574177f9e6104b93ae57ae8364616976131000
|
packages/submin/subminadmin/c_config.py
|
https://github.com/cafeday/submin
| true
| false
| true
|
@@ -120,7 +120,7 @@ sqlite_path = os.path.join(os.path.dirname(__file__), "submin.db")
'enabled_trac': 'no',
'session_salt': self.session_salt(),
'env_path': '/bin:/usr/bin:/usr/local/bin:/opt/local/bin',
- 'vcs_plugins': ','.join(self.vcs_plugins()),
+ 'vcs_plugins': 'svn',
}
for (key, value) in default_options.iteritems():
options.set_value(key, value)
|
'vcs_plugins' : ',' . join ( self . vcs_plugins ( ) ) ,
|
'vcs_plugins' : 'svn' ,
|
SINGLE_STMT
|
[["Move", ["expression_statement", 3, 4, 3, 48], ["concatenated_string", 3, 4, 3, 22], 0], ["Update", ["string:','", 3, 19, 3, 22], "'svn'"], ["Delete", [".:.", 3, 22, 3, 23]], ["Delete", ["identifier:join", 3, 23, 3, 27]], ["Delete", ["attribute", 3, 4, 3, 27]], ["Delete", ["(:(", 3, 27, 3, 28]], ["Delete", ["identifier:self", 3, 28, 3, 32]], ["Delete", [".:.", 3, 32, 3, 33]], ["Delete", ["identifier:vcs_plugins", 3, 33, 3, 44]], ["Delete", ["attribute", 3, 28, 3, 44]], ["Delete", ["(:(", 3, 44, 3, 45]], ["Delete", ["):)", 3, 45, 3, 46]], ["Delete", ["argument_list", 3, 44, 3, 46]], ["Delete", ["call", 3, 28, 3, 46]], ["Delete", ["):)", 3, 46, 3, 47]], ["Delete", ["argument_list", 3, 27, 3, 47]], ["Delete", ["call", 3, 4, 3, 47]]]
|
salt
|
91bb3913c142f9a4e58e68098461c9dc09310990
|
f833b6f5b6e0ca5fd1885902444aa32cb4ed1369
|
salt/modules/pip.py
|
https://github.com/penta-srl/salt
| true
| false
| true
|
@@ -118,7 +118,7 @@ def _get_pip_bin(bin_env):
if not bin_env:
- which_result = __salt__['cmd.which_bin'](['pip2', 'pip', 'pip-python'])
+ which_result = __salt__['cmd.which_bin'](['pip', 'pip2', 'pip-python'])
if which_result is None:
raise CommandNotFoundError('Could not find a `pip` binary')
if salt.utils.is_windows():
|
which_result = __salt__ [ 'cmd.which_bin' ] ( [ 'pip2' , 'pip' , 'pip-python' ] )
|
which_result = __salt__ [ 'cmd.which_bin' ] ( [ 'pip' , 'pip2' , 'pip-python' ] )
|
SINGLE_STMT
|
[["Move", ["string:'pip2'", 1, 51, 1, 57], ["list", 1, 50, 1, 79], 3], ["Move", [",:,", 1, 57, 1, 58], ["list", 1, 50, 1, 79], 4]]
|
django-blog-zinnia
|
c50594ed37bb01fa3d6be37924db6e8147775797
|
59c55aba0da473306365385675eab411be5b85af
|
zinnia/management/commands/wp2zinnia.py
|
https://github.com/xingjianpan/django-blog-zinnia
| true
| false
| true
|
@@ -404,7 +404,7 @@ class Command(LabelCommand):
'comment': content,
'submit_date': submit_date,
'ip_address': comment_node.find(
- '{%s}comment_author_IP' % WP_NS).text or '',
+ '{%s}comment_author_IP' % WP_NS).text or None,
'is_public': is_public,
'is_removed': is_removed, }
comment = comments.get_model()(**comment_dict)
|
comment_node . find ( '{%s}comment_author_IP' % WP_NS ) . text or '' ,
|
comment_node . find ( '{%s}comment_author_IP' % WP_NS ) . text or None ,
|
CHANGE_BINARY_OPERAND
|
[["Insert", ["boolean_operator", 2, 31, 3, 64], ["none:None", "T"], 2], ["Delete", ["string:''", 3, 62, 3, 64]]]
|
electrum
|
c07832a1fe7a89fc3e55953a261d1907b0110e40
|
077bf77d1fd67f5c9c7510feac3bcebefbdeb3db
|
lib/gui_qt.py
|
https://github.com/learn-alexuser01/electrum
| true
| false
| true
|
@@ -1178,7 +1178,7 @@ class ElectrumWindow(QMainWindow):
fee_e.setText("%s"% str( Decimal( self.wallet.fee)/100000000 ) )
grid.addWidget(QLabel(_('Transaction fee')), 2, 0)
grid.addWidget(fee_e, 2, 1)
- grid.addWidget(HelpButton('Fee per transaction input. Transactions involving multiple inputs tend to require a higher fee. Recommended value: 0.0005'), 2, 2)
+ grid.addWidget(HelpButton('Fee per transaction input. Transactions involving multiple inputs tend to require a higher fee. Recommended value: 0.001'), 2, 2)
fee_e.textChanged.connect(lambda: numbify(fee_e,False))
nz_e = QLineEdit()
|
grid . addWidget ( HelpButton ( 'Fee per transaction input. Transactions involving multiple inputs tend to require a higher fee. Recommended value: 0.0005' ) , 2 , 2 )
|
grid . addWidget ( HelpButton ( 'Fee per transaction input. Transactions involving multiple inputs tend to require a higher fee. Recommended value: 0.001' ) , 2 , 2 )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'Fee per transaction input. Transactions involving multiple inputs tend to require a higher fee. Recommended value: 0.0005'", 3, 35, 3, 158], "'Fee per transaction input. Transactions involving multiple inputs tend to require a higher fee. Recommended value: 0.001'"]]
|
xpdAn
|
2b8f67e4773ad3e53293efe01328f87d2b126616
|
c60107bb445d5f0e3374a98ea3ed2447210e4ada
|
xpdan/tests/conftest.py
|
https://github.com/xpdAcq/xpdAn
| true
| false
| false
|
@@ -23,7 +23,7 @@ import pytest
from xpdan.fuzzybroker import FuzzyBroker
from xpdan.glbl_gen import make_glbl, load_configuration
from skbeam.io.fit2d import fit2d_save
-from xpdan.tests.utils import insert_imgs
+from .utils import insert_imgs
from bluesky.examples import ReaderWithRegistryHandler
from bluesky.tests.conftest import fresh_RE
from bluesky.tests.conftest import db
|
from xpdan . tests . utils import insert_imgs
|
from . utils import insert_imgs
|
SINGLE_STMT
|
[["Insert", ["import_from_statement", 3, 1, 3, 42], ["relative_import", "N0"], 1], ["Insert", "N0", ["import_prefix", "N1"], 0], ["Insert", "N0", ["dotted_name", "N2"], 1], ["Move", "N1", [".:.", 3, 11, 3, 12], 0], ["Move", "N2", ["identifier:utils", 3, 18, 3, 23], 0], ["Delete", ["identifier:xpdan", 3, 6, 3, 11]], ["Delete", ["identifier:tests", 3, 12, 3, 17]], ["Delete", [".:.", 3, 17, 3, 18]], ["Delete", ["dotted_name", 3, 6, 3, 23]]]
|
scipy
|
cdd84f6926a3c1835c13393aa4207e1163d78249
|
9a5820bfb6f52e8d29458b4104618d8ba5cc05fa
|
scipy/interpolate/tests/test_interpolate.py
|
https://github.com/scipy/scipy
| true
| false
| true
|
@@ -72,7 +72,7 @@ class TestInterp2D(TestCase):
def test_interp2d_bounds(self):
x = np.linspace(0, 1, 5)
y = np.linspace(0, 2, 7)
- z = x[:,None]**2 + y[None,:]
+ z = x[None, :]**2 + y[:, None]
ix = np.linspace(-1, 3, 31)
iy = np.linspace(-1, 3, 33)
|
z = x [ : , None ] ** 2 + y [ None , : ]
|
z = x [ None , : ] ** 2 + y [ : , None ]
|
SINGLE_STMT
|
[["Move", ["slice", 3, 35, 3, 36], ["subscript", 3, 28, 3, 37], 2], ["Insert", ["subscript", 3, 28, 3, 37], ["none:None", "T"], 5], ["Insert", ["subscript", 3, 13, 3, 22], ["none:None", "T"], 2], ["Insert", ["subscript", 3, 13, 3, 22], [",:,", "T"], 3], ["Delete", [",:,", 3, 16, 3, 17]], ["Delete", ["none:None", 3, 17, 3, 21]], ["Delete", ["none:None", 3, 30, 3, 34]]]
|
conda
|
fb5f1ca4ea5b0aad3e759e5419cccc646be9fd40
|
1ccae9296a1c041149e4b3dd4f896dea09398e2d
|
conda/utils.py
|
https://github.com/minrk/conda
| true
| false
| true
|
@@ -289,7 +289,7 @@ def sys_prefix_unfollowed():
try:
- frame = sys._current_frames().values()[0]
+ frame = list(sys._current_frames().values())[0]
while frame.f_back:
frame = frame.f_back
code = frame.f_code
|
frame = sys . _current_frames ( ) . values ( ) [ 0 ]
|
frame = list ( sys . _current_frames ( ) . values ( ) ) [ 0 ]
|
ADD_FUNCTION_AROUND_EXPRESSION
|
[["Insert", ["call", 1, 17, 1, 47], ["identifier:list", "T"], 0], ["Insert", ["call", 1, 17, 1, 47], ["argument_list", "N0"], 1], ["Insert", "N0", ["(:(", "T"], 0], ["Move", "N0", ["call", 1, 17, 1, 47], 1], ["Insert", "N0", ["):)", "T"], 2]]
|
poky-openuxas
|
b6b6d923a6f81c96590d091cd9eebd1bd2031045
|
d32d08fd4891bc6998349875eb2deb36bb7322c8
|
meta/lib/oe/sstatesig.py
|
https://github.com/afrl-rq/poky-openuxas
| true
| false
| true
|
@@ -128,7 +128,7 @@ def find_siginfo(pn, taskname, taskhashlist, d):
else:
filedates[fullpath] = os.stat(fullpath).st_mtime
- if len(filedates) < 2 and not foundall:
+ if not taskhashlist or (len(filedates) < 2 and not foundall):
# That didn't work, look in sstate-cache
hashes = taskhashlist or ['*']
localdata = bb.data.createCopy(d)
|
if len ( filedates ) < 2 and not foundall : hashes = taskhashlist or [ '*' ] localdata = bb . data . createCopy ( d )
|
if not taskhashlist or ( len ( filedates ) < 2 and not foundall ) : hashes = taskhashlist or [ '*' ] localdata = bb . data . createCopy ( d )
|
SINGLE_STMT
|
[["Insert", ["if_statement", 3, 5, 6, 42], ["not_operator", "N0"], 1], ["Insert", "N0", ["not:not", "T"], 0], ["Insert", "N0", ["boolean_operator", "N1"], 1], ["Insert", "N1", ["identifier:taskhashlist", "T"], 0], ["Insert", "N1", ["or:or", "T"], 1], ["Insert", "N1", ["parenthesized_expression", "N2"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Move", "N2", ["boolean_operator", 3, 8, 3, 43], 1], ["Insert", "N2", ["):)", "T"], 2]]
|
dosenet-raspberrypi
|
63dfbca35eaba68f3fc4a322d34455577324973c
|
7407966fa2ab8c8af55fb323a1b07ea44264da0e
|
tests.py
|
https://github.com/tybtab/dosenet-raspberrypi
| true
| false
| true
|
@@ -354,7 +354,7 @@ class TestDataLog(unittest.TestCase):
mgr.takedown()
self.assertIsNotNone(output)
- self.assertEqual(len(output), 3)
+ self.assertEqual(len(output), 2)
def tearDown(self):
os.remove(DEFAULT_TEST_DATALOG)
|
self . assertEqual ( len ( output ) , 3 )
|
self . assertEqual ( len ( output ) , 2 )
|
CHANGE_NUMERIC_LITERAL
|
[["Update", ["integer:3", 3, 39, 3, 40], "2"]]
|
data-science-bowl-2019
|
757809892fd2f1d290bc8b5dd5ea60a2d43a7484
|
3028159abe2c9e06d3bc1086e87911845b6be1f5
|
pipelines.py
|
https://github.com/akshayjh/data-science-bowl-2019
| true
| false
| true
|
@@ -33,7 +33,7 @@ def unet(config, train_mode):
output = Step(name='output',
transformer=Dummy(),
input_steps=[detached],
- adapter={'y_pred': ([(detached.name, 'labels')]),
+ adapter={'y_pred': ([(detached.name, 'labeled_images')]),
},
cache_dirpath=config.env.cache_dirpath)
return output
|
output = Step ( name = 'output' , transformer = Dummy ( ) , input_steps = [ detached ] , adapter = { 'y_pred' : ( [ ( detached . name , 'labels' ) ] ) , } , cache_dirpath = config . env . cache_dirpath )
|
output = Step ( name = 'output' , transformer = Dummy ( ) , input_steps = [ detached ] , adapter = { 'y_pred' : ( [ ( detached . name , 'labeled_images' ) ] ) , } , cache_dirpath = config . env . cache_dirpath )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'labels'", 3, 56, 3, 64], "'labeled_images'"]]
|
dipy
|
de669da1b53b4c16050adf5e9c76991db3f8c74a
|
00dca4c13ad7b76ea96a622e982ef94d87008b0a
|
dipy/reconst/dti.py
|
https://github.com/tomotech/dipy
| true
| false
| true
|
@@ -1193,7 +1193,7 @@ class TensorFit(object):
if S0 is None:
- S0 = self.S0_hat
+ S0 = self.S0 #note it's S0, not S0_hat because the shapes aren't the same
shape = self.model_params.shape[:-1]
size = np.prod(shape)
if step is None:
|
S0 = self . S0_hat
|
S0 = self . S0
|
CHANGE_ATTRIBUTE_USED
|
[["Update", ["identifier:S0_hat", 1, 23, 1, 29], "S0"]]
|
cobbler
|
44ea790788e4d985c1192292711ccda4ac9963e6
|
a2ec18bcaf8909dae0bb5028b28293c6778d8015
|
cobbler/cli.py
|
https://github.com/hardion/cobbler
| true
| false
| true
|
@@ -274,7 +274,7 @@ class BootCLI:
self.parser.add_option("--name", dest="name", help="name of object")
(options, args) = self.parser.parse_args()
- if object_action in [ "add", "edit", "copy", "rename", "remove" ]:
+ if object_action in [ "add", "edit", "copy", "rename", "remove", "reboot" ]:
if opt(options, "name") == "":
print "--name is required"
sys.exit(1)
|
if object_action in [ "add" , "edit" , "copy" , "rename" , "remove" ] : if opt ( options , "name" ) == "" : print "--name is required" sys . exit ( 1 )
|
if object_action in [ "add" , "edit" , "copy" , "rename" , "remove" , "reboot" ] : if opt ( options , "name" ) == "" : print "--name is required" sys . exit ( 1 )
|
ADD_ELEMENTS_TO_ITERABLE
|
[["Insert", ["list", 3, 29, 3, 74], [",:,", "T"], 10], ["Insert", ["list", 3, 29, 3, 74], ["string:\"reboot\"", "T"], 11]]
|
intel-manager-for-lustre
|
b176e8703dd1ae3323c678c1f08e4795978da4a9
|
8cdcd75a284c7c9b6044ff01d1be698dc236a325
|
monitor/tasks.py
|
https://github.com/brianjmurrell/intel-manager-for-lustre
| true
| false
| true
|
@@ -58,7 +58,7 @@ def audit_all():
if not host.monitor.last_success and not host.is_available():
# Set the HostContactAlert high
from monitor.models import HostContactAlert
- HostContactAlert.notify(ManagedHost.objects.get(pk = host['id']), True)
+ HostContactAlert.notify(host, True)
else:
for host in ManagedHost.objects.all():
if host.monitor:
|
HostContactAlert . notify ( ManagedHost . objects . get ( pk = host [ 'id' ] ) , True )
|
HostContactAlert . notify ( host , True )
|
SINGLE_STMT
|
[["Move", ["argument_list", 3, 40, 3, 88], ["identifier:host", 3, 70, 3, 74], 1], ["Move", ["argument_list", 3, 40, 3, 88], ["):)", 3, 80, 3, 81], 5], ["Delete", ["identifier:ManagedHost", 3, 41, 3, 52]], ["Delete", [".:.", 3, 52, 3, 53]], ["Delete", ["identifier:objects", 3, 53, 3, 60]], ["Delete", ["attribute", 3, 41, 3, 60]], ["Delete", [".:.", 3, 60, 3, 61]], ["Delete", ["identifier:get", 3, 61, 3, 64]], ["Delete", ["attribute", 3, 41, 3, 64]], ["Delete", ["(:(", 3, 64, 3, 65]], ["Delete", ["identifier:pk", 3, 65, 3, 67]], ["Delete", ["=:=", 3, 68, 3, 69]], ["Delete", ["[:[", 3, 74, 3, 75]], ["Delete", ["string:'id'", 3, 75, 3, 79]], ["Delete", ["]:]", 3, 79, 3, 80]], ["Delete", ["subscript", 3, 70, 3, 80]], ["Delete", ["keyword_argument", 3, 65, 3, 80]], ["Delete", ["argument_list", 3, 64, 3, 81]], ["Delete", ["call", 3, 41, 3, 81]], ["Delete", ["):)", 3, 87, 3, 88]]]
|
ansible-modules-extras
|
0af9622891426ae58b8419e2842f760e6b3fbba7
|
d0c607c7a40cc4f78fdde120a87c50e569349507
|
messaging/rabbitmq_user.py
|
https://github.com/planetlabs/ansible-modules-extras
| true
| false
| true
|
@@ -228,7 +228,7 @@ def main():
user=dict(required=True, aliases=['username', 'name']),
password=dict(default=None),
tags=dict(default=None),
- permissions=dict(default=list()),
+ permissions=dict(default=list(), type='list'),
vhost=dict(default='/'),
configure_priv=dict(default='^$'),
write_priv=dict(default='^$'),
|
permissions = dict ( default = list ( ) ) ,
|
permissions = dict ( default = list ( ) , type = 'list' ) ,
|
SAME_FUNCTION_MORE_ARGS
|
[["Insert", ["argument_list", 3, 25, 3, 41], [",:,", "T"], 2], ["Insert", ["argument_list", 3, 25, 3, 41], ["keyword_argument", "N0"], 3], ["Insert", "N0", ["identifier:type", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["string:'list'", "T"], 2]]
|
cpython
|
cb6c2669ff51b690f42353e235ea9d5ca62e4cb7
|
5d19c38a80c996cb3636b031b5d10c71a96cbf1f
|
Lib/distutils/command/install_scripts.py
|
https://github.com/klange/cpython
| true
| false
| true
|
@@ -53,7 +53,7 @@ class install_scripts (Command):
if self.dry_run:
log.info("changing mode of %s", file)
else:
- mode = ((os.stat(file)[ST_MODE]) | 0111) & 07777
+ mode = ((os.stat(file)[ST_MODE]) | 0555) & 07777
log.info("changing mode of %s to %o", file, mode)
os.chmod(file, mode)
|
mode = ( ( os . stat ( file ) [ ST_MODE ] ) | 0111 ) & 07777
|
mode = ( ( os . stat ( file ) [ ST_MODE ] ) | 0555 ) & 07777
|
CHANGE_NUMERIC_LITERAL
|
[["Update", ["integer:0111", 3, 56, 3, 60], "0555"]]
|
sql-agg
|
37252018aff899d0b1c771154f1c6cee8a80b1f3
|
7ee7ea6e9b5de777c7f699fb4488f48af89758a5
|
setup.py
|
https://github.com/dimagi/sql-agg
| true
| false
| false
|
@@ -19,7 +19,7 @@ setup(
'unittest2',
'nose',
'SQLAlchemy-Fixtures>=0.1.5',
- 'fixture>=1.4'
+ 'fixture>=1.4',
'psycopg2'
]
)
|
'fixture>=1.4'
|
'fixture>=1.4' ,
|
SINGLE_STMT
|
[["Insert", ["expression_statement", 3, 9, 3, 23], [",:,", "T"], 1]]
|
electrum-zeny
|
c0535634fda7363b0dc105b7e53d68f6ffd99bfc
|
671b1574b39ab2bb0c7b90740968f90ecc67ca0a
|
gui/qt/installwizard.py
|
https://github.com/BitzenyCoreDevelopers/electrum-zeny
| true
| false
| true
|
@@ -85,7 +85,7 @@ class InstallWizard(QDialog):
def get_seed_text(self, seed_e):
- text = unicode(seed_e.toPlainText()).lower().strip()
+ text = unicode(seed_e.toPlainText()).strip()
text = ' '.join(text.split())
return text
|
text = unicode ( seed_e . toPlainText ( ) ) . lower ( ) . strip ( )
|
text = unicode ( seed_e . toPlainText ( ) ) . strip ( )
|
SINGLE_STMT
|
[["Delete", ["(:(", 3, 51, 3, 52]], ["Delete", ["):)", 3, 52, 3, 53]], ["Delete", ["argument_list", 3, 51, 3, 53]]]
|
catterplotpy
|
39d670ba5832c08f5aa3d8dc19f87cd8ad298443
|
99aea1b6a9ae243c560a021556cec4896d3c2b3f
|
catterplot/__init__.py
|
https://github.com/eteq/catterplotpy
| true
| false
| false
|
@@ -1 +1 @@
-from core import *
+from .core import *
|
from core import *
|
from . core import *
|
SINGLE_STMT
|
[["Insert", ["import_from_statement", 0, 1, 0, 19], ["relative_import", "N0"], 1], ["Insert", "N0", ["import_prefix", "N1"], 0], ["Move", "N0", ["dotted_name", 0, 6, 0, 10], 1], ["Insert", "N1", [".:.", "T"], 0]]
|
pywikipedia
|
6351c44800c8116d0374a8284aa8a3e2879e0692
|
105f6ab4252df3aa79ce53fa2ea89e8c280ea44a
|
pywikipedia/userinterfaces/terminal_interface.py
|
https://github.com/trottier/pywikipedia
| true
| false
| true
|
@@ -158,7 +158,7 @@ class UI:
# ctypes is only available since Python 2.5, and we won't
# try to colorize without it. Instead we add *** after the text as a whole
# if anything needed to be colorized.
- lines = '\n'.split(text)
+ lines = text.split('\n')
for line in lines:
line, count = colorTagR.subn('', line)
if count > 0:
|
lines = '\n' . split ( text )
|
lines = text . split ( '\n' )
|
SINGLE_STMT
|
[["Move", ["attribute", 3, 21, 3, 31], ["identifier:text", 3, 32, 3, 36], 0], ["Move", ["argument_list", 3, 31, 3, 37], ["string:'\\n'", 3, 21, 3, 25], 1]]
|
cob_command_tools
|
c4b7fa3a22b8dfd631a4fe7f6c8c6cdef5f23706
|
8f7d7a3f09b3bbe2d7a48126af021ac4ff13bce5
|
cob_monitoring/src/battery_light_monitor.py
|
https://github.com/ipa-fmw/cob_command_tools
| true
| false
| true
|
@@ -115,7 +115,7 @@ class battery_light_monitor():
goal.mode = mode
client.send_goal(goal)
client.wait_for_result()
- res = client.getResult()
+ res = client.get_result()
if track:
self.track_id_light[component] = res.track_id
|
res = client . getResult ( )
|
res = client . get_result ( )
|
WRONG_FUNCTION_NAME
|
[["Update", ["identifier:getResult", 3, 30, 3, 39], "get_result"]]
|
pycket
|
e3794542055fab8d86942e2b8c36399923605af5
|
a3b6dfb60bc2a9b4af69aa7db975b8d880397c01
|
pycket/values_string.py
|
https://github.com/pycket/pycket
| true
| false
| true
|
@@ -517,7 +517,7 @@ class UnicodeMutableStringStrategy(MutableStringStrategy):
assert 0
def as_unicharlist(self, w_str):
- return self.unerase(w_str.get_storage()[:])
+ return self.unerase(w_str.get_storage())[:]
def as_unicode(self, w_str):
return u"".join(self.unerase(w_str.get_storage()))
|
return self . unerase ( w_str . get_storage ( ) [ : ] )
|
return self . unerase ( w_str . get_storage ( ) ) [ : ]
|
SINGLE_STMT
|
[["Move", ["return_statement", 3, 9, 3, 52], ["subscript", 3, 29, 3, 51], 1], ["Move", ["subscript", 3, 29, 3, 51], ["call", 3, 16, 3, 52], 0], ["Move", ["argument_list", 3, 28, 3, 52], ["call", 3, 29, 3, 48], 1]]
|
sd_foregrounds
|
feccb33dce13f0ecee88672e7c4a41d38ddf6874
|
00ca14581503dcd88f875cdb136d3d64bf722db8
|
foregrounds.py
|
https://github.com/mabitbol/sd_foregrounds
| true
| false
| true
|
@@ -65,7 +65,7 @@ def spinning_dust(nu, Asd=92.e-6):
return krj_to_radiance(nu, Asd * (nu0 / nu) ** 2 * numer_fsd / denom_fsd)
-def thermal_dust_rad(nu, Ad=163.e-6, Bd=1.83, Td=21.):
+def thermal_dust_rad(nu, Ad=163.e-6, Bd=1.53, Td=21.):
return krj_to_radiance(nu, thermal_dust(nu, Ad, Bd, Td))
def thermal_dust(nu, Ad=163.e-6, Bd=1.53, Td=21.):
|
def thermal_dust_rad ( nu , Ad = 163.e-6 , Bd = 1.83 , Td = 21. ) : return krj_to_radiance ( nu , thermal_dust ( nu , Ad , Bd , Td ) )
|
def thermal_dust_rad ( nu , Ad = 163.e-6 , Bd = 1.53 , Td = 21. ) : return krj_to_radiance ( nu , thermal_dust ( nu , Ad , Bd , Td ) )
|
CHANGE_NUMERIC_LITERAL
|
[["Update", ["float:1.83", 3, 41, 3, 45], "1.53"]]
|
sphinx-1
|
db8f42841428f69a89fce64b6f595feb1fcf80a7
|
8f820c97aebf8e7076fc5613445001194102599c
|
sphinx/directives/desc.py
|
https://github.com/ezc/sphinx-1
| true
| false
| true
|
@@ -456,7 +456,7 @@ class ClassmemberDesc(PythonDesc):
def needs_arglist(self):
- return self.argtype.endswith('method')
+ return self.desctype.endswith('method')
def get_signature_prefix(self, sig):
if self.desctype == 'staticmethod':
|
return self . argtype . endswith ( 'method' )
|
return self . desctype . endswith ( 'method' )
|
CHANGE_ATTRIBUTE_USED
|
[["Update", ["identifier:argtype", 2, 21, 2, 28], "desctype"]]
|
celery
|
be4253b64d1695c1ef22e7e2b9cd275280e52b98
|
4feca5e50ef2b9c3010dcc7d8cb71f5653a88833
|
celery/platform.py
|
https://github.com/nuSPIC/celery
| true
| false
| true
|
@@ -56,7 +56,7 @@ def create_daemon_context(logfile=None, pidfile=None, **options):
# set SIGCLD back to the default SIG_DFL (before python-daemon overrode
# it) lets the parent wait() for the terminated child process and stops
# the 'OSError: [Errno 10] No child processes' problem.
- platform.reset_signal("SIGCLD")
+ reset_signal("SIGCLD")
# Since without stderr any errors will be silently suppressed,
# we need to know that we have access to the logfile
|
platform . reset_signal ( "SIGCLD" )
|
reset_signal ( "SIGCLD" )
|
SINGLE_STMT
|
[["Move", ["call", 3, 5, 3, 36], ["identifier:reset_signal", 3, 14, 3, 26], 0], ["Delete", ["identifier:platform", 3, 5, 3, 13]], ["Delete", [".:.", 3, 13, 3, 14]], ["Delete", ["attribute", 3, 5, 3, 26]]]
|
electrum-trump
|
cdbee6f4c93c7ced320707260836cd844f83c523
|
2d1600d350254e7b4b1079992d227ca601b30c12
|
gui/qt/main_window.py
|
https://github.com/Skirmant/electrum-trump
| true
| false
| false
|
@@ -98,7 +98,7 @@ pr_tooltips = {
expiration_values = [
(_('1 hour'), 60*60),
- (_('1 day'), 24*64*64),
+ (_('1 day'), 24*60*60),
(_('1 week'), 7*24*60*60),
(_('Never'), None)
]
|
expiration_values = [ ( _ ( '1 hour' ) , 60 * 60 ) , ( _ ( '1 day' ) , 24 * 64 * 64 ) , ( _ ( '1 week' ) , 7 * 24 * 60 * 60 ) , ( _ ( 'Never' ) , None ) ]
|
expiration_values = [ ( _ ( '1 hour' ) , 60 * 60 ) , ( _ ( '1 day' ) , 24 * 60 * 60 ) , ( _ ( '1 week' ) , 7 * 24 * 60 * 60 ) , ( _ ( 'Never' ) , None ) ]
|
SINGLE_STMT
|
[["Update", ["integer:64", 3, 24, 3, 26], "60"], ["Update", ["integer:64", 3, 21, 3, 23], "60"]]
|
watch-do
|
5fe3f7b6cc9db12256e6539c82f1091a59c02055
|
9a078b9219bd961f5a1ea0deea30e22873ff04ca
|
setup.py
|
https://github.com/vimist/watch-do
| true
| false
| false
|
@@ -23,7 +23,7 @@ setup(
'Topic :: Software Development',
'Topic :: System'
],
- packages=['watch_do'],
+ packages=['watch_do', 'watch_do.doers', 'watch_do.watchers'],
entry_points={
'console_scripts': [
'watch-do=watch_do.cli:watch_do'
|
packages = [ 'watch_do' ] ,
|
packages = [ 'watch_do' , 'watch_do.doers' , 'watch_do.watchers' ] ,
|
ADD_ELEMENTS_TO_ITERABLE
|
[["Insert", ["list", 3, 14, 3, 26], [",:,", "T"], 2], ["Insert", ["list", 3, 14, 3, 26], ["string:'watch_do.doers'", "T"], 3], ["Insert", ["list", 3, 14, 3, 26], [",:,", "T"], 4], ["Insert", ["list", 3, 14, 3, 26], ["string:'watch_do.watchers'", "T"], 5]]
|
enigma2
|
94d5be2305939d16b5ea2746668efe02d8ca5621
|
f5d433dcfa17bfd5d1ba8b7202bcfbd3ebcf44f9
|
lib/python/Components/UsageConfig.py
|
https://github.com/gdpablo/enigma2
| true
| false
| true
|
@@ -579,7 +579,7 @@ def InitUsageConfig():
config.backupmanager.backupretry = ConfigNumber(default = 30)
config.backupmanager.backupretrycount = NoSave(ConfigNumber(default = 0))
config.backupmanager.nextscheduletime = NoSave(ConfigNumber(default = 0))
- config.backupmanager.backupdirs = ConfigLocations(default=[eEnv.resolve('${sysconfdir}/enigma2/'), eEnv.resolve('${sysconfdir}/fstab'), eEnv.resolve('${sysconfdir}/hostname'), eEnv.resolve('${sysconfdir}/network/interfaces'), eEnv.resolve('${sysconfdir}/passwd'), eEnv.resolve('${sysconfdir}//etc/shadow'), eEnv.resolve('${sysconfdir}/resolv.conf'), eEnv.resolve('${sysconfdir}/ushare.conf'), eEnv.resolve('${sysconfdir}/inadyn.conf'), eEnv.resolve('${sysconfdir}/tuxbox/config/'), eEnv.resolve('${sysconfdir}/wpa_supplicant.conf'), '/usr/softcams/'])
+ config.backupmanager.backupdirs = ConfigLocations(default=[eEnv.resolve('${sysconfdir}/enigma2/'), eEnv.resolve('${sysconfdir}/fstab'), eEnv.resolve('${sysconfdir}/hostname'), eEnv.resolve('${sysconfdir}/network/interfaces'), eEnv.resolve('${sysconfdir}/passwd'), eEnv.resolve('${sysconfdir}/etc/shadow'), eEnv.resolve('${sysconfdir}/resolv.conf'), eEnv.resolve('${sysconfdir}/ushare.conf'), eEnv.resolve('${sysconfdir}/inadyn.conf'), eEnv.resolve('${sysconfdir}/tuxbox/config/'), eEnv.resolve('${sysconfdir}/wpa_supplicant.conf'), '/usr/softcams/'])
config.backupmanager.lastlog = ConfigText(default=' ', fixed_size=False)
config.vixsettings = ConfigSubsection()
|
config . backupmanager . backupdirs = ConfigLocations ( default = [ eEnv . resolve ( '${sysconfdir}/enigma2/' ) , eEnv . resolve ( '${sysconfdir}/fstab' ) , eEnv . resolve ( '${sysconfdir}/hostname' ) , eEnv . resolve ( '${sysconfdir}/network/interfaces' ) , eEnv . resolve ( '${sysconfdir}/passwd' ) , eEnv . resolve ( '${sysconfdir}//etc/shadow' ) , eEnv . resolve ( '${sysconfdir}/resolv.conf' ) , eEnv . resolve ( '${sysconfdir}/ushare.conf' ) , eEnv . resolve ( '${sysconfdir}/inadyn.conf' ) , eEnv . resolve ( '${sysconfdir}/tuxbox/config/' ) , eEnv . resolve ( '${sysconfdir}/wpa_supplicant.conf' ) , '/usr/softcams/' ] )
|
config . backupmanager . backupdirs = ConfigLocations ( default = [ eEnv . resolve ( '${sysconfdir}/enigma2/' ) , eEnv . resolve ( '${sysconfdir}/fstab' ) , eEnv . resolve ( '${sysconfdir}/hostname' ) , eEnv . resolve ( '${sysconfdir}/network/interfaces' ) , eEnv . resolve ( '${sysconfdir}/passwd' ) , eEnv . resolve ( '${sysconfdir}/etc/shadow' ) , eEnv . resolve ( '${sysconfdir}/resolv.conf' ) , eEnv . resolve ( '${sysconfdir}/ushare.conf' ) , eEnv . resolve ( '${sysconfdir}/inadyn.conf' ) , eEnv . resolve ( '${sysconfdir}/tuxbox/config/' ) , eEnv . resolve ( '${sysconfdir}/wpa_supplicant.conf' ) , '/usr/softcams/' ] )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'${sysconfdir}//etc/shadow'", 3, 279, 3, 306], "'${sysconfdir}/etc/shadow'"]]
|
firewalld
|
a41da62fd56efd954eac1e459ab0ae1914617f9b
|
992c8ab8044c83c42c3c82dc23f4163a0ec84453
|
src/firewall/core/fw.py
|
https://github.com/NetworkManager/firewalld
| true
| false
| true
|
@@ -251,7 +251,7 @@ class Firewall:
(ipv, rule) = rules[i]
# drop insert rule number if it exists
- if insert and not enable and isinstance(rule[1], IntType):
+ if insert and not enable and isinstance(rule[1], int):
rule.pop(1)
# run
|
if insert and not enable and isinstance ( rule [ 1 ] , IntType ) : rule . pop ( 1 )
|
if insert and not enable and isinstance ( rule [ 1 ] , int ) : rule . pop ( 1 )
|
CHANGE_IDENTIFIER_USED
|
[["Update", ["identifier:IntType", 3, 62, 3, 69], "int"]]
|
numba
|
01cd4f2f4ed3a26e189eec994478c7fa1d3a962c
|
f82ed9ff82f31a1ab87dc1511a442cfc2d4a22fa
|
numba/typed/typedlist.py
|
https://github.com/Juanlu001/numba
| true
| false
| true
|
@@ -357,7 +357,7 @@ class List(MutableSequence):
def extend(self, iterable):
# Empty iterable, do nothing
if len(iterable) == 0:
- return self
+ return None
if not self._typed:
# Need to get the first element of the iterable to initialise the
# type of the list. FIXME: this may be a problem if the iterable
|
return self
|
return None
|
SINGLE_TOKEN
|
[["Insert", ["return_statement", 3, 13, 3, 24], ["none:None", "T"], 1], ["Delete", ["identifier:self", 3, 20, 3, 24]]]
|
piuparts-old
|
becc50fd6fa667c93adcee5e9397b3fea2894383
|
686d3e9ede6357e7412e250bfba31d981f9bd521
|
piuparts.py
|
https://github.com/anbe42/piuparts-old
| true
| false
| true
|
@@ -1914,7 +1914,7 @@ def parse_command_line():
parser.add_option("--dpkg-force-confdef",
default=False,
action='store_true',
- help="Make dpkg use --force-confdef, which lets dpkg always choose the default action when a modified conffile is found. This options will make piuparts ignore errors it was designed to report and therefore should only be used to hide problems in depending packages.")
+ help="Make dpkg use --force-confdef, which lets dpkg always choose the default action when a modified conffile is found. This option will make piuparts ignore errors it was designed to report and therefore should only be used to hide problems in depending packages.")
parser.add_option("--do-not-verify-signatures", default=False,
action='store_true',
|
parser . add_option ( "--dpkg-force-confdef" , default = False , action = 'store_true' , help = "Make dpkg use --force-confdef, which lets dpkg always choose the default action when a modified conffile is found. This options will make piuparts ignore errors it was designed to report and therefore should only be used to hide problems in depending packages." )
|
parser . add_option ( "--dpkg-force-confdef" , default = False , action = 'store_true' , help = "Make dpkg use --force-confdef, which lets dpkg always choose the default action when a modified conffile is found. This option will make piuparts ignore errors it was designed to report and therefore should only be used to hide problems in depending packages." )
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:\"Make dpkg use --force-confdef, which lets dpkg always choose the default action when a modified conffile is found. This options will make piuparts ignore errors it was designed to report and therefore should only be used to hide problems in depending packages.\"", 3, 14, 3, 276], "\"Make dpkg use --force-confdef, which lets dpkg always choose the default action when a modified conffile is found. This option will make piuparts ignore errors it was designed to report and therefore should only be used to hide problems in depending packages.\""]]
|
firewalld
|
c1e40cf55affd2393aa070a124476b066c0197c6
|
77cff332d99fa56adc0ea5ddabde069e95593fa2
|
src/firewall/core/fw_config.py
|
https://github.com/jpopelka/firewalld
| true
| false
| true
|
@@ -108,7 +108,7 @@ class FirewallConfig:
return self._firewalld_conf
def update_firewalld_conf(self):
- if not os.path.exists(LOCKDOWN_WHITELIST):
+ if not os.path.exists(FIREWALLD_CONF):
self._firewalld_conf.clear()
else:
self._firewalld_conf.read()
|
if not os . path . exists ( LOCKDOWN_WHITELIST ) : self . _firewalld_conf . clear ( ) else : self . _firewalld_conf . read ( )
|
if not os . path . exists ( FIREWALLD_CONF ) : self . _firewalld_conf . clear ( ) else : self . _firewalld_conf . read ( )
|
CHANGE_IDENTIFIER_USED
|
[["Update", ["identifier:LOCKDOWN_WHITELIST", 3, 31, 3, 49], "FIREWALLD_CONF"]]
|
electrum
|
9c28489bc025929eb25c53b7b10562f95179a98d
|
46e602357835bed655b8c9b5d46879e34098a957
|
lib/wallet.py
|
https://github.com/neocogent/electrum
| true
| false
| true
|
@@ -995,7 +995,7 @@ class Abstract_Wallet(PrintError):
self.synchronize()
def can_export(self):
- return not self.is_watching_only()
+ return not self.is_watching_only() and hasattr(self.keystore, 'get_private_key')
def is_used(self, address):
h = self.history.get(address,[])
|
return not self . is_watching_only ( )
|
return not self . is_watching_only ( ) and hasattr ( self . keystore , 'get_private_key' )
|
SINGLE_STMT
|
[["Insert", ["not_operator", 3, 16, 3, 43], ["boolean_operator", "N0"], 1], ["Move", "N0", ["call", 3, 20, 3, 43], 0], ["Insert", "N0", ["and:and", "T"], 1], ["Insert", "N0", ["call", "N1"], 2], ["Insert", "N1", ["identifier:hasattr", "T"], 0], ["Insert", "N1", ["argument_list", "N2"], 1], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["attribute", "N3"], 1], ["Insert", "N2", [",:,", "T"], 2], ["Insert", "N2", ["string:'get_private_key'", "T"], 3], ["Insert", "N2", ["):)", "T"], 4], ["Insert", "N3", ["identifier:self", "T"], 0], ["Insert", "N3", [".:.", "T"], 1], ["Insert", "N3", ["identifier:keystore", "T"], 2]]
|
bokeh
|
5763cb2f95b731933263f0dd38fbffff6f71147c
|
a211c73561bea6bcc10f11a8870040e121651d9c
|
tests/examples/examples_report_plugin.py
|
https://github.com/TomAugspurger/bokeh
| true
| false
| false
|
@@ -9,7 +9,7 @@ import sys
from os.path import join, dirname
from py.xml import html
-from ..constants import __version__
+from ..constants import __version__, default_diff, default_timeout
from ..utils import write, green
from .utils import no_ext, human_bytes
|
from . . constants import __version__
|
from . . constants import __version__ , default_diff , default_timeout
|
SINGLE_STMT
|
[["Insert", ["import_from_statement", 3, 1, 3, 36], [",:,", "T"], 4], ["Insert", ["import_from_statement", 3, 1, 3, 36], ["dotted_name", "N0"], 5], ["Insert", ["import_from_statement", 3, 1, 3, 36], [",:,", "T"], 6], ["Insert", ["import_from_statement", 3, 1, 3, 36], ["dotted_name", "N1"], 7], ["Insert", "N0", ["identifier:default_diff", "T"], 0], ["Insert", "N1", ["identifier:default_timeout", "T"], 0]]
|
flask-security
|
8a14abaa1e74a5a0e18c3ab53a0eb5f417efd7f3
|
6b55e9613a93a34099124bfe3a3820dd902af9e1
|
flask_security/decorators.py
|
https://github.com/yingbo/flask-security
| true
| false
| true
|
@@ -154,7 +154,7 @@ def auth_required(*auth_methods):
if _security._unauthorized_callback:
return _security._unauthorized_callback()
else:
- return _get_unauthorized_response()
+ return _get_unauthorized_response(headers=h)
return decorated_view
return wrapper
|
return _get_unauthorized_response ( )
|
return _get_unauthorized_response ( headers = h )
|
SAME_FUNCTION_MORE_ARGS
|
[["Insert", ["argument_list", 3, 50, 3, 52], ["keyword_argument", "N0"], 1], ["Insert", "N0", ["identifier:headers", "T"], 0], ["Insert", "N0", ["=:=", "T"], 1], ["Insert", "N0", ["identifier:h", "T"], 2]]
|
phrerp
|
19da0247c83ac325c65f402e46bdc6d140e2cdfa
|
c674e43885bffde3752872392fd48a016487ff4c
|
accounts/doctype/cost_center/cost_center.py
|
https://github.com/indictranstech/phrerp
| true
| false
| true
|
@@ -55,7 +55,7 @@ class DocType:
webnotes.utils.nestedset.update_nsm(self)
def check_if_child_exists(self):
- return sql("select name from `tabCost Center` where parent_cost_center = %s and docstatus != 2", self.doc.name, debug=1)
+ return sql("select name from `tabCost Center` where parent_cost_center = %s and docstatus != 2", self.doc.name, debug=0)
# On Trash
# --------
|
return sql ( "select name from `tabCost Center` where parent_cost_center = %s and docstatus != 2" , self . doc . name , debug = 1 )
|
return sql ( "select name from `tabCost Center` where parent_cost_center = %s and docstatus != 2" , self . doc . name , debug = 0 )
|
CHANGE_NUMERIC_LITERAL
|
[["Update", ["integer:1", 3, 123, 3, 124], "0"]]
|
celery
|
6891f9368337ff2c104b5ad68df9d12d7215d906
|
3b28b859db19514b9537e6509c2950d938402863
|
celery/concurrency/base.py
|
https://github.com/UGentPortaal/celery
| true
| false
| true
|
@@ -41,7 +41,7 @@ class BasePool(object):
def on_apply(self, *args, **kwargs):
pass
- def kill_job(self, pid):
+ def terminate_job(self, pid):
raise NotImplementedError(
"%s does not implement kill_job" % (self.__class__, ))
|
def kill_job ( self , pid ) : raise NotImplementedError ( "%s does not implement kill_job" % ( self . __class__ , ) )
|
def terminate_job ( self , pid ) : raise NotImplementedError ( "%s does not implement kill_job" % ( self . __class__ , ) )
|
SINGLE_TOKEN
|
[["Update", ["identifier:kill_job", 3, 9, 3, 17], "terminate_job"]]
|
ChatterBot
|
8dae8bf6908d61b600bb31310df5e46564f29ff2
|
900c149e6f664e0ebc4f627d9290dce03fab4713
|
examples/learning_feedback_example.py
|
https://github.com/AxStudio/ChatterBot
| true
| false
| false
|
@@ -46,7 +46,7 @@ while True:
print('\n Is "{}" this a coherent response to "{}"? \n'.format(response, input_statement))
if get_feedback():
- bot.learn_response(response)
+ bot.learn_response(response,input_statement)
bot.output.process_response(response, confidence)
|
bot . learn_response ( response )
|
bot . learn_response ( response , input_statement )
|
SAME_FUNCTION_MORE_ARGS
|
[["Insert", ["argument_list", 3, 31, 3, 41], [",:,", "T"], 2], ["Insert", ["argument_list", 3, 31, 3, 41], ["identifier:input_statement", "T"], 3]]
|
sovrin-node
|
a6c3ae79d8c7a87cf4178ad9a5a704fa11bd0b10
|
0d13068cbb664526b547b05882dff5d2bf08b284
|
data/migrations/deb/1_3_428_to_1_3_429.py
|
https://github.com/andkononykhin/sovrin-node
| true
| false
| true
|
@@ -107,7 +107,7 @@ def add_tag_into_cred_def_id(val):
cred_def_id=new_cred_def_id.decode(),
revoc_def_type=rev_type,
revoc_def_tag=rev_tag)
- new_val[ID] = new_revoc_reg_def_id
+ new_val[ID] = new_revoc_reg_def_id.decode()
else:
return False
else:
|
revoc_def_tag = rev_tag ) new_val [ ID ] = new_revoc_reg_def_id
|
revoc_def_tag = rev_tag ) new_val [ ID ] = new_revoc_reg_def_id . decode ( )
|
ADD_METHOD_CALL
|
[["Insert", ["assignment", 3, 13, 3, 47], ["call", "N0"], 2], ["Insert", "N0", ["attribute", "N1"], 0], ["Insert", "N0", ["argument_list", "N2"], 1], ["Move", "N1", ["identifier:new_revoc_reg_def_id", 3, 27, 3, 47], 0], ["Insert", "N1", [".:.", "T"], 1], ["Insert", "N1", ["identifier:decode", "T"], 2], ["Insert", "N2", ["(:(", "T"], 0], ["Insert", "N2", ["):)", "T"], 1]]
|
goagent-me
|
80ffd0936424df8aa6c8aff9da015bc6ab9d4af5
|
9e89297c97f7e45f4f05ec05b1cd5792fc459230
|
local/proxy.py
|
https://github.com/justinleoye/goagent-me
| true
| false
| true
|
@@ -1095,7 +1095,7 @@ class LocalPacHandler(BaseHTTPServer.BaseHTTPRequestHandler):
return 'DIRECT';
}
}
- return 'SOCKS 127.0.0.1:8964';
+ return 'SOCKS 127.0.0.1:8087';
|
return 'SOCKS 127.0.0.1:8964'
|
return 'SOCKS 127.0.0.1:8087'
|
CHANGE_STRING_LITERAL
|
[["Update", ["string:'SOCKS 127.0.0.1:8964'", 3, 24, 3, 46], "'SOCKS 127.0.0.1:8087'"]]
|
fangmi-api
|
d789f8f63fb14a44bacf89614eb304d4b518a3cf
|
539d9b806235ae39504fa85bd989b70ecf0f772c
|
app/models.py
|
https://github.com/huxuan/fangmi-api
| true
| false
| true
|
@@ -1569,7 +1569,7 @@ class Captcha(db.Model):
timedelta(seconds=app.config['EMY_TIMEDELTA_SECONDS']):
raise utils.APIException(utils.API_CODE_CAPTCHA_EXCEED_FREQUENCY)
captcha.token = token
- captcha.delted = False
+ captcha.deleted = False
captcha.created_at = datetime.now()
else:
captcha = cls(
|
captcha . delted = False
|
captcha . deleted = False
|
CHANGE_ATTRIBUTE_USED
|
[["Update", ["identifier:delted", 3, 21, 3, 27], "deleted"]]
|