Copy+Attentional Convolutional Model


Original Name get,particle,flags,buffer

get

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

flags

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.3%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 2.0%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>
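
The entry above shows the model's tokenized view of a one-line delegate: identifiers are split on camel case and lower-cased, %SELF% masks occurrences of the method's own name inside its body, and %END% is the end-of-name symbol whose probability closes each prediction. Detokenized, the entry plausibly corresponds to a JBox2D/libGDX-style wrapper on World; this is a sketch only, and the m_particleSystem field and the int[] return type are assumptions rather than part of the listing.

// Hypothetical reconstruction of the "get particle flags buffer" delegate.
public int[] getParticleFlagsBuffer() {
    return m_particleSystem.getParticleFlagsBuffer();  // %SELF% stands for the method's own name
}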


Original Name get,particle,position,buffer

get

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

position

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.3%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 3.5%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>


Original Name get,particle,velocity,buffer

get

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

velocity

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.3%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 1.5%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>


Original Name get,particle,color,buffer

get

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

color

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.3%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 2.9%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>


Original Name get,particle,group,buffer

get

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

group

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.3%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>


Original Name get,particle,user,data,buffer

get

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

user

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.3%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

data

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 2.3%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>


Original Name set,particle,flags,buffer

set

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

particle

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

flags

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 1.4%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>
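
The setter entries all share one tokenized body: a void delegate that forwards a caller-supplied buffer together with its capacity. A minimal sketch of the detokenized form, with the parameter types assumed:

// Hypothetical reconstruction of "set particle flags buffer".
public void setParticleFlagsBuffer(int[] buffer, int capacity) {
    m_particleSystem.setParticleFlagsBuffer(buffer, capacity);
}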


Original Name set,particle,position,buffer

set

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

particle

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

position

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 2.5%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>


Original Name set,particle,velocity,buffer

set

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

particle

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

velocity

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 1.1%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>


Original Name set,particle,color,buffer

set

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

particle

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

color

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 2.0%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>


Original Name set,particle,user,data,buffer

set

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

particle

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

user

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

data

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 1.7%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 2.6%)

<SENTENCE_START> { m particle system . %SELF% ( buffer , capacity ) ; } <SENTENCE_END/>


Original Name get,particle,contacts

get

<SENTENCE_START> { return m particle system . m contact buffer ; } <SENTENCE_END/>

(Copy Probability: 0.7%)

<SENTENCE_START> { return m particle system . m contact buffer ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m particle system . m contact buffer ; } <SENTENCE_END/>

(Copy Probability: 37.2%)

<SENTENCE_START> { return m particle system . m contact buffer ; } <SENTENCE_END/>

contacts

<SENTENCE_START> { return m particle system . m contact buffer ; } <SENTENCE_END/>

(Copy Probability: 22.5%)

<SENTENCE_START> { return m particle system . m contact buffer ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m particle system . m contact buffer ; } <SENTENCE_END/>

(Copy Probability: 2.7%)

<SENTENCE_START> { return m particle system . m contact buffer ; } <SENTENCE_END/>
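
Unlike the %SELF% delegates above, this body reads a field of the wrapped particle system directly, so the subtokens "particle" and "contact" are available for copying from m contact buffer, which is consistent with the much higher copy probabilities reported here. A sketch, with the element type assumed:

// Hypothetical reconstruction of "get particle contacts": direct field access.
public ParticleContact[] getParticleContacts() {
    return m_particleSystem.m_contactBuffer;
}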


Original Name get,particle,contact,count

get

<SENTENCE_START> { return m particle system . m contact count ; } <SENTENCE_END/>

(Copy Probability: 0.7%)

<SENTENCE_START> { return m particle system . m contact count ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m particle system . m contact count ; } <SENTENCE_END/>

(Copy Probability: 26.8%)

<SENTENCE_START> { return m particle system . m contact count ; } <SENTENCE_END/>

contact

<SENTENCE_START> { return m particle system . m contact count ; } <SENTENCE_END/>

(Copy Probability: 20.4%)

<SENTENCE_START> { return m particle system . m contact count ; } <SENTENCE_END/>

count

<SENTENCE_START> { return m particle system . m contact count ; } <SENTENCE_END/>

(Copy Probability: 2.8%)

<SENTENCE_START> { return m particle system . m contact count ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m particle system . m contact count ; } <SENTENCE_END/>

(Copy Probability: 3.3%)

<SENTENCE_START> { return m particle system . m contact count ; } <SENTENCE_END/>


Original Name get,particle,body,contacts

get

<SENTENCE_START> { return m particle system . m body contact buffer ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { return m particle system . m body contact buffer ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m particle system . m body contact buffer ; } <SENTENCE_END/>

(Copy Probability: 36.5%)

<SENTENCE_START> { return m particle system . m body contact buffer ; } <SENTENCE_END/>

body

<SENTENCE_START> { return m particle system . m body contact buffer ; } <SENTENCE_END/>

(Copy Probability: 14.4%)

<SENTENCE_START> { return m particle system . m body contact buffer ; } <SENTENCE_END/>

contacts

<SENTENCE_START> { return m particle system . m body contact buffer ; } <SENTENCE_END/>

(Copy Probability: 2.8%)

<SENTENCE_START> { return m particle system . m body contact buffer ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m particle system . m body contact buffer ; } <SENTENCE_END/>

(Copy Probability: 3.3%)

<SENTENCE_START> { return m particle system . m body contact buffer ; } <SENTENCE_END/>


Original Name get,particle,body,contact,count

get

<SENTENCE_START> { return m particle system . m body contact count ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { return m particle system . m body contact count ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m particle system . m body contact count ; } <SENTENCE_END/>

(Copy Probability: 18.7%)

<SENTENCE_START> { return m particle system . m body contact count ; } <SENTENCE_END/>

body

<SENTENCE_START> { return m particle system . m body contact count ; } <SENTENCE_END/>

(Copy Probability: 12.5%)

<SENTENCE_START> { return m particle system . m body contact count ; } <SENTENCE_END/>

contact

<SENTENCE_START> { return m particle system . m body contact count ; } <SENTENCE_END/>

(Copy Probability: 2.7%)

<SENTENCE_START> { return m particle system . m body contact count ; } <SENTENCE_END/>

count

<SENTENCE_START> { return m particle system . m body contact count ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { return m particle system . m body contact count ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m particle system . m body contact count ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { return m particle system . m body contact count ; } <SENTENCE_END/>


Original Name compute,particle,collision,energy

compute

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

collision

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.3%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

energy

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 1.1%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { return m particle system . %SELF% ( ) ; } <SENTENCE_END/>


Original Name tree,callback

tree

<SENTENCE_START> { fixture proxy proxy = ( fixture proxy ) broad phase . get user data ( node id ) ; return callback . report fixture ( proxy . fixture ) ; } <SENTENCE_END/>

(Copy Probability: 2.9%)

<SENTENCE_START> { fixture proxy proxy = ( fixture proxy ) broad phase . get user data ( node id ) ; return callback . report fixture ( proxy . fixture ) ; } <SENTENCE_END/>

callback

<SENTENCE_START> { fixture proxy proxy = ( fixture proxy ) broad phase . get user data ( node id ) ; return callback . report fixture ( proxy . fixture ) ; } <SENTENCE_END/>

(Copy Probability: 75.4%)

<SENTENCE_START> { fixture proxy proxy = ( fixture proxy ) broad phase . get user data ( node id ) ; return callback . report fixture ( proxy . fixture ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { fixture proxy proxy = ( fixture proxy ) broad phase . get user data ( node id ) ; return callback . report fixture ( proxy . fixture ) ; } <SENTENCE_END/>

(Copy Probability: 23.8%)

<SENTENCE_START> { fixture proxy proxy = ( fixture proxy ) broad phase . get user data ( node id ) ; return callback . report fixture ( proxy . fixture ) ; } <SENTENCE_END/>
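
For "callback" the listing attributes most of the probability mass (75.4%) to the copy mechanism, since that token appears verbatim in the body. Detokenized, the body is a broad-phase query callback along the following lines; the class and field names are assumptions based on the JBox2D API:

// Hypothetical reconstruction of "tree callback".
public boolean treeCallback(int nodeId) {
    FixtureProxy proxy = (FixtureProxy) broadPhase.getUserData(nodeId);
    return callback.reportFixture(proxy.fixture);  // ask the user callback whether to continue
}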


Original Name raycast,callback

raycast

<SENTENCE_START> { object user data = broad phase . get user data ( node id ) ; fixture proxy proxy = ( fixture proxy ) user data ; fixture fixture = proxy . fixture ; int index = proxy . child index ; boolean hit = fixture . raycast ( output , input , index ) ; if ( hit ) { float fraction = output . fraction ; temp . set ( input . p 2 ) . mul local ( fraction ) ; point . set ( input . p 1 ) . mul local ( 1 - fraction ) . add local ( temp ) ; return callback . report fixture ( fixture , point , output . normal , fraction ) ; } return input . max fraction ; } <SENTENCE_END/>

(Copy Probability: 5.8%)

<SENTENCE_START> { object user data = broad phase . get user data ( node id ) ; fixture proxy proxy = ( fixture proxy ) user data ; fixture fixture = proxy . fixture ; int index = proxy . child index ; boolean hit = fixture . raycast ( output , input , index ) ; if ( hit ) { float fraction = output . fraction ; temp . set ( input . p 2 ) . mul local ( fraction ) ; point . set ( input . p 1 ) . mul local ( 1 - fraction ) . add local ( temp ) ; return callback . report fixture ( fixture , point , output . normal , fraction ) ; } return input . max fraction ; } <SENTENCE_END/>

callback

<SENTENCE_START> { object user data = broad phase . get user data ( node id ) ; fixture proxy proxy = ( fixture proxy ) user data ; fixture fixture = proxy . fixture ; int index = proxy . child index ; boolean hit = fixture . raycast ( output , input , index ) ; if ( hit ) { float fraction = output . fraction ; temp . set ( input . p 2 ) . mul local ( fraction ) ; point . set ( input . p 1 ) . mul local ( 1 - fraction ) . add local ( temp ) ; return callback . report fixture ( fixture , point , output . normal , fraction ) ; } return input . max fraction ; } <SENTENCE_END/>

(Copy Probability: 36.6%)

<SENTENCE_START> { object user data = broad phase . get user data ( node id ) ; fixture proxy proxy = ( fixture proxy ) user data ; fixture fixture = proxy . fixture ; int index = proxy . child index ; boolean hit = fixture . raycast ( output , input , index ) ; if ( hit ) { float fraction = output . fraction ; temp . set ( input . p 2 ) . mul local ( fraction ) ; point . set ( input . p 1 ) . mul local ( 1 - fraction ) . add local ( temp ) ; return callback . report fixture ( fixture , point , output . normal , fraction ) ; } return input . max fraction ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { object user data = broad phase . get user data ( node id ) ; fixture proxy proxy = ( fixture proxy ) user data ; fixture fixture = proxy . fixture ; int index = proxy . child index ; boolean hit = fixture . raycast ( output , input , index ) ; if ( hit ) { float fraction = output . fraction ; temp . set ( input . p 2 ) . mul local ( fraction ) ; point . set ( input . p 1 ) . mul local ( 1 - fraction ) . add local ( temp ) ; return callback . report fixture ( fixture , point , output . normal , fraction ) ; } return input . max fraction ; } <SENTENCE_END/>

(Copy Probability: 10.8%)

<SENTENCE_START> { object user data = broad phase . get user data ( node id ) ; fixture proxy proxy = ( fixture proxy ) user data ; fixture fixture = proxy . fixture ; int index = proxy . child index ; boolean hit = fixture . raycast ( output , input , index ) ; if ( hit ) { float fraction = output . fraction ; temp . set ( input . p 2 ) . mul local ( fraction ) ; point . set ( input . p 1 ) . mul local ( 1 - fraction ) . add local ( temp ) ; return callback . report fixture ( fixture , point , output . normal , fraction ) ; } return input . max fraction ; } <SENTENCE_END/>
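
This is the longest body in the listing that is not a simple delegate. A sketch of the detokenized raycast callback, assuming the Vec2 temporaries (temp, point), the output record, and the user callback are fields of the enclosing wrapper:

// Hypothetical reconstruction of "raycast callback".
public float raycastCallback(RayCastInput input, int nodeId) {
    Object userData = broadPhase.getUserData(nodeId);
    FixtureProxy proxy = (FixtureProxy) userData;
    Fixture fixture = proxy.fixture;
    int index = proxy.childIndex;
    boolean hit = fixture.raycast(output, input, index);
    if (hit) {
        float fraction = output.fraction;
        // hit point = (1 - fraction) * p1 + fraction * p2
        temp.set(input.p2).mulLocal(fraction);
        point.set(input.p1).mulLocal(1 - fraction).addLocal(temp);
        return callback.reportFixture(fixture, point, output.normal, fraction);
    }
    return input.maxFraction;  // no hit: keep the ray at full length
}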


Original Name begin

begin

<SENTENCE_START> { start = target . get rotation ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { start = target . get rotation ( ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { start = target . get rotation ( ) ; } <SENTENCE_END/>

(Copy Probability: 2.8%)

<SENTENCE_START> { start = target . get rotation ( ) ; } <SENTENCE_END/>


Original Name update

update

<SENTENCE_START> { target . set rotation ( start + ( end - start ) * percent ) ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { target . set rotation ( start + ( end - start ) * percent ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { target . set rotation ( start + ( end - start ) * percent ) ; } <SENTENCE_END/>

(Copy Probability: 3.3%)

<SENTENCE_START> { target . set rotation ( start + ( end - start ) * percent ) ; } <SENTENCE_END/>
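
begin and update form an interpolating action: begin caches the target's current rotation, and update writes the linear blend between that cached start and the configured end. A sketch, with the start, end, and target fields assumed from the tokens:

// Hypothetical reconstruction of the rotation action pair.
protected void begin() {
    start = target.getRotation();  // remember the angle we interpolate from
}

protected void update(float percent) {
    target.setRotation(start + (end - start) * percent);  // linear interpolation
}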


Original Name get,rotation

get

<SENTENCE_START> { return end ; } <SENTENCE_END/>

(Copy Probability: 0.3%)

<SENTENCE_START> { return end ; } <SENTENCE_END/>

rotation

<SENTENCE_START> { return end ; } <SENTENCE_END/>

(Copy Probability: 17.3%)

<SENTENCE_START> { return end ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return end ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { return end ; } <SENTENCE_END/>


Original Name set,rotation

set

<SENTENCE_START> { this . end = rotation ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { this . end = rotation ; } <SENTENCE_END/>

rotation

<SENTENCE_START> { this . end = rotation ; } <SENTENCE_END/>

(Copy Probability: 64.2%)

<SENTENCE_START> { this . end = rotation ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { this . end = rotation ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { this . end = rotation ; } <SENTENCE_END/>


Original Name get,default,vertex,shader

get

<SENTENCE_START> { if ( default vertex shader == null ) default vertex shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.vertex.glsl" ) . read string ( ) ; return default vertex shader ; } <SENTENCE_END/>

(Copy Probability: 3.4%)

<SENTENCE_START> { if ( default vertex shader == null ) default vertex shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.vertex.glsl" ) . read string ( ) ; return default vertex shader ; } <SENTENCE_END/>

default

<SENTENCE_START> { if ( default vertex shader == null ) default vertex shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.vertex.glsl" ) . read string ( ) ; return default vertex shader ; } <SENTENCE_END/>

(Copy Probability: 97.2%)

<SENTENCE_START> { if ( default vertex shader == null ) default vertex shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.vertex.glsl" ) . read string ( ) ; return default vertex shader ; } <SENTENCE_END/>

vertex

<SENTENCE_START> { if ( default vertex shader == null ) default vertex shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.vertex.glsl" ) . read string ( ) ; return default vertex shader ; } <SENTENCE_END/>

(Copy Probability: 99.0%)

<SENTENCE_START> { if ( default vertex shader == null ) default vertex shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.vertex.glsl" ) . read string ( ) ; return default vertex shader ; } <SENTENCE_END/>

shader

<SENTENCE_START> { if ( default vertex shader == null ) default vertex shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.vertex.glsl" ) . read string ( ) ; return default vertex shader ; } <SENTENCE_END/>

(Copy Probability: 74.2%)

<SENTENCE_START> { if ( default vertex shader == null ) default vertex shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.vertex.glsl" ) . read string ( ) ; return default vertex shader ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( default vertex shader == null ) default vertex shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.vertex.glsl" ) . read string ( ) ; return default vertex shader ; } <SENTENCE_END/>

(Copy Probability: 5.3%)

<SENTENCE_START> { if ( default vertex shader == null ) default vertex shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.vertex.glsl" ) . read string ( ) ; return default vertex shader ; } <SENTENCE_END/>
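
Here "default" and "vertex" are copied with 97-99% probability and "shader" at 74%, since all three subtokens occur verbatim in the lazily initialized field; the classpath string ties the example to the libGDX shadow-system test assets. A sketch of the lazy loader:

// Hypothetical reconstruction of the lazily loaded shader source getter.
public static String getDefaultVertexShader() {
    if (defaultVertexShader == null)
        defaultVertexShader = Gdx.files
            .classpath("com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.vertex.glsl")
            .readString();
    return defaultVertexShader;
}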


Original Name get,default,fragment,shader

get

<SENTENCE_START> { if ( default fragment shader == null ) default fragment shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.fragment.glsl" ) . read string ( ) ; return default fragment shader ; } <SENTENCE_END/>

(Copy Probability: 3.2%)

<SENTENCE_START> { if ( default fragment shader == null ) default fragment shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.fragment.glsl" ) . read string ( ) ; return default fragment shader ; } <SENTENCE_END/>

default

<SENTENCE_START> { if ( default fragment shader == null ) default fragment shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.fragment.glsl" ) . read string ( ) ; return default fragment shader ; } <SENTENCE_END/>

(Copy Probability: 97.5%)

<SENTENCE_START> { if ( default fragment shader == null ) default fragment shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.fragment.glsl" ) . read string ( ) ; return default fragment shader ; } <SENTENCE_END/>

fragment

<SENTENCE_START> { if ( default fragment shader == null ) default fragment shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.fragment.glsl" ) . read string ( ) ; return default fragment shader ; } <SENTENCE_END/>

(Copy Probability: 99.2%)

<SENTENCE_START> { if ( default fragment shader == null ) default fragment shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.fragment.glsl" ) . read string ( ) ; return default fragment shader ; } <SENTENCE_END/>

shader

<SENTENCE_START> { if ( default fragment shader == null ) default fragment shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.fragment.glsl" ) . read string ( ) ; return default fragment shader ; } <SENTENCE_END/>

(Copy Probability: 80.1%)

<SENTENCE_START> { if ( default fragment shader == null ) default fragment shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.fragment.glsl" ) . read string ( ) ; return default fragment shader ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( default fragment shader == null ) default fragment shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.fragment.glsl" ) . read string ( ) ; return default fragment shader ; } <SENTENCE_END/>

(Copy Probability: 5.5%)

<SENTENCE_START> { if ( default fragment shader == null ) default fragment shader = gdx . files . classpath ( "com/badlogic/gdx/tests/g3d/shadows/system/realistic/pass1.fragment.glsl" ) . read string ( ) ; return default fragment shader ; } <SENTENCE_END/>


Original Name add,mesh

add

<SENTENCE_START> { for ( model mesh other : meshes ) { if ( other . id . equals ( mesh . id ) ) { throw new gdx runtime exception ( "Mesh with id '" + other . id + "' already in model" ) ; } } meshes . add ( mesh ) ; } <SENTENCE_END/>

(Copy Probability: 1.9%)

<SENTENCE_START> { for ( model mesh other : meshes ) { if ( other . id . equals ( mesh . id ) ) { throw new gdx runtime exception ( "Mesh with id '" + other . id + "' already in model" ) ; } } meshes . add ( mesh ) ; } <SENTENCE_END/>

mesh

<SENTENCE_START> { for ( model mesh other : meshes ) { if ( other . id . equals ( mesh . id ) ) { throw new gdx runtime exception ( "Mesh with id '" + other . id + "' already in model" ) ; } } meshes . add ( mesh ) ; } <SENTENCE_END/>

(Copy Probability: 7.7%)

<SENTENCE_START> { for ( model mesh other : meshes ) { if ( other . id . equals ( mesh . id ) ) { throw new gdx runtime exception ( "Mesh with id '" + other . id + "' already in model" ) ; } } meshes . add ( mesh ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { for ( model mesh other : meshes ) { if ( other . id . equals ( mesh . id ) ) { throw new gdx runtime exception ( "Mesh with id '" + other . id + "' already in model" ) ; } } meshes . add ( mesh ) ; } <SENTENCE_END/>

(Copy Probability: 4.7%)

<SENTENCE_START> { for ( model mesh other : meshes ) { if ( other . id . equals ( mesh . id ) ) { throw new gdx runtime exception ( "Mesh with id '" + other . id + "' already in model" ) ; } } meshes . add ( mesh ) ; } <SENTENCE_END/>
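
add mesh is a guarded insert: the existing meshes are scanned for an id collision before the new mesh is appended. A sketch, reusing the ModelMesh and GdxRuntimeException names visible in the tokens:

// Hypothetical reconstruction of "add mesh": reject duplicate ids, then append.
public void addMesh(ModelMesh mesh) {
    for (ModelMesh other : meshes) {
        if (other.id.equals(mesh.id)) {
            throw new GdxRuntimeException("Mesh with id '" + other.id + "' already in model");
        }
    }
    meshes.add(mesh);
}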


Original Name get,c,ptr

get

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

(Copy Probability: 1.5%)

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

c

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

(Copy Probability: 99.7%)

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

ptr

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

(Copy Probability: 3.7%)

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>
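
The near-certain copies for "c" and "ptr" (99.7% and 99.8%) come from the swig c ptr field in the body; this is the standard SWIG-generated accessor that returns the wrapped native pointer, or 0 for null. A sketch, with the wrapper class name assumed:

// Hypothetical reconstruction of the SWIG-style native pointer accessor.
public static long getCPtr(NativeWrapper obj) {
    return (obj == null) ? 0 : obj.swigCPtr;
}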


Original Name new,instance

new

<SENTENCE_START> { return java . lang . reflect . array . %SELF% ( c , size ) ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return java . lang . reflect . array . %SELF% ( c , size ) ; } <SENTENCE_END/>

instance

<SENTENCE_START> { return java . lang . reflect . array . %SELF% ( c , size ) ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { return java . lang . reflect . array . %SELF% ( c , size ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return java . lang . reflect . array . %SELF% ( c , size ) ; } <SENTENCE_END/>

(Copy Probability: 0.7%)

<SENTENCE_START> { return java . lang . reflect . array . %SELF% ( c , size ) ; } <SENTENCE_END/>


Original Name get,length

get

<SENTENCE_START> { return java . lang . reflect . array . %SELF% ( array ) ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { return java . lang . reflect . array . %SELF% ( array ) ; } <SENTENCE_END/>

length

<SENTENCE_START> { return java . lang . reflect . array . %SELF% ( array ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { return java . lang . reflect . array . %SELF% ( array ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return java . lang . reflect . array . %SELF% ( array ) ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { return java . lang . reflect . array . %SELF% ( array ) ; } <SENTENCE_END/>


Original Name get

get

<SENTENCE_START> { return java . lang . reflect . array . %SELF% ( array , index ) ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return java . lang . reflect . array . %SELF% ( array , index ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return java . lang . reflect . array . %SELF% ( array , index ) ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { return java . lang . reflect . array . %SELF% ( array , index ) ; } <SENTENCE_END/>


Original Name set

set

<SENTENCE_START> { java . lang . reflect . array . %SELF% ( array , index , value ) ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { java . lang . reflect . array . %SELF% ( array , index , value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { java . lang . reflect . array . %SELF% ( array , index , value ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { java . lang . reflect . array . %SELF% ( array , index , value ) ; } <SENTENCE_END/>
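
The four preceding entries wrap java.lang.reflect.Array one-to-one, each a single static delegate. A combined sketch using the real reflection API; the enclosing class name is an assumption:

// Hypothetical reconstruction of the reflection array helpers.
public final class ArrayReflection {
    public static Object newInstance(Class c, int size) {
        return java.lang.reflect.Array.newInstance(c, size);
    }

    public static int getLength(Object array) {
        return java.lang.reflect.Array.getLength(array);
    }

    public static Object get(Object array, int index) {
        return java.lang.reflect.Array.get(array, index);
    }

    public static void set(Object array, int index, Object value) {
        java.lang.reflect.Array.set(array, index, value);
    }
}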


Original Name set

set

<SENTENCE_START> { this . x = point . x ; this . y = point . y ; return this ; } <SENTENCE_END/>

(Copy Probability: 1.2%)

<SENTENCE_START> { this . x = point . x ; this . y = point . y ; return this ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { this . x = point . x ; this . y = point . y ; return this ; } <SENTENCE_END/>

(Copy Probability: 11.3%)

<SENTENCE_START> { this . x = point . x ; this . y = point . y ; return this ; } <SENTENCE_END/>


Original Name set

set

<SENTENCE_START> { this . x = x ; this . y = y ; return this ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { this . x = x ; this . y = y ; return this ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { this . x = x ; this . y = y ; return this ; } <SENTENCE_END/>

(Copy Probability: 6.8%)

<SENTENCE_START> { this . x = x ; this . y = y ; return this ; } <SENTENCE_END/>


Original Name compute,tag

compute

<SENTENCE_START> { return ( ( ( long ) ( y + y offset ) ) << y shift ) + ( ( ( long ) ( x scale * x ) ) + x offset ) ; } <SENTENCE_END/>

(Copy Probability: 1.7%)

<SENTENCE_START> { return ( ( ( long ) ( y + y offset ) ) << y shift ) + ( ( ( long ) ( x scale * x ) ) + x offset ) ; } <SENTENCE_END/>

tag

<SENTENCE_START> { return ( ( ( long ) ( y + y offset ) ) << y shift ) + ( ( ( long ) ( x scale * x ) ) + x offset ) ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return ( ( ( long ) ( y + y offset ) ) << y shift ) + ( ( ( long ) ( x scale * x ) ) + x offset ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return ( ( ( long ) ( y + y offset ) ) << y shift ) + ( ( ( long ) ( x scale * x ) ) + x offset ) ; } <SENTENCE_END/>

(Copy Probability: 1.8%)

<SENTENCE_START> { return ( ( ( long ) ( y + y offset ) ) << y shift ) + ( ( ( long ) ( x scale * x ) ) + x offset ) ; } <SENTENCE_END/>


Original Name compute,relative,tag

compute

<SENTENCE_START> { return tag + ( y << y shift ) + ( x << x shift ) ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return tag + ( y << y shift ) + ( x << x shift ) ; } <SENTENCE_END/>

relative

<SENTENCE_START> { return tag + ( y << y shift ) + ( x << x shift ) ; } <SENTENCE_END/>

(Copy Probability: 1.4%)

<SENTENCE_START> { return tag + ( y << y shift ) + ( x << x shift ) ; } <SENTENCE_END/>

tag

<SENTENCE_START> { return tag + ( y << y shift ) + ( x << x shift ) ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return tag + ( y << y shift ) + ( x << x shift ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return tag + ( y << y shift ) + ( x << x shift ) ; } <SENTENCE_END/>

(Copy Probability: 1.9%)

<SENTENCE_START> { return tag + ( y << y shift ) + ( x << x shift ) ; } <SENTENCE_END/>
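
compute tag packs a grid coordinate into a single long: the offset y row is shifted into the high bits and the scaled, offset x column fills the low bits; compute relative tag then moves an existing tag by whole cells with plain shifts. A sketch, with the shift, scale, and offset constants assumed to be static fields whose values the listing does not show:

// Hypothetical reconstruction of the tag packing helpers.
static long computeTag(float x, float y) {
    return (((long) (y + yOffset)) << yShift) + (((long) (xScale * x)) + xOffset);
}

static long computeRelativeTag(long tag, int x, int y) {
    return tag + (y << yShift) + (x << xShift);
}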


Original Name limit,capacity

limit

<SENTENCE_START> { return max count != 0 && capacity > max count ? max count : capacity ; } <SENTENCE_END/>

(Copy Probability: 1.1%)

<SENTENCE_START> { return max count != 0 && capacity > max count ? max count : capacity ; } <SENTENCE_END/>

capacity

<SENTENCE_START> { return max count != 0 && capacity > max count ? max count : capacity ; } <SENTENCE_END/>

(Copy Probability: 75.9%)

<SENTENCE_START> { return max count != 0 && capacity > max count ? max count : capacity ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return max count != 0 && capacity > max count ? max count : capacity ; } <SENTENCE_END/>

(Copy Probability: 41.3%)

<SENTENCE_START> { return max count != 0 && capacity > max count ? max count : capacity ; } <SENTENCE_END/>
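
limit capacity clamps a requested capacity to maxCount, treating a maxCount of 0 as "no limit"; the 75.9% copy probability on "capacity" matches that token being available in the body. A sketch:

// Hypothetical reconstruction: maxCount == 0 means "no limit", otherwise clamp.
static int limitCapacity(int capacity, int maxCount) {
    return maxCount != 0 && capacity > maxCount ? maxCount : capacity;
}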


Original Name create,particle

create

<SENTENCE_START> { if ( m count >= m internal allocated capacity ) { int capacity = m count != 0 ? 2 * m count : settings . min particle buffer capacity ; capacity = limit capacity ( capacity , m max count ) ; capacity = limit capacity ( capacity , m flags buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m position buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m velocity buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m color buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m user data buffer . user supplied capacity ) ; if ( m internal allocated capacity < capacity ) { m flags buffer . data = reallocate buffer ( m flags buffer , m internal allocated capacity , capacity , false ) ; m position buffer . data = reallocate buffer ( m position buffer , m internal allocated capacity , capacity , false ) ; m velocity buffer . data = reallocate buffer ( m velocity buffer , m internal allocated capacity , capacity , false ) ; m accumulation buffer = buffer utils . reallocate buffer ( m accumulation buffer , 0 , m internal allocated capacity , capacity , false ) ; m accumulation 2 buffer = buffer utils . reallocate buffer ( vec 2 . class , m accumulation 2 buffer , 0 , m internal allocated capacity , capacity , true ) ; m depth buffer = buffer utils . reallocate buffer ( m depth buffer , 0 , m internal allocated capacity , capacity , true ) ; m color buffer . data = reallocate buffer ( m color buffer , m internal allocated capacity , capacity , true ) ; m group buffer = buffer utils . reallocate buffer ( particle group . class , m group buffer , 0 , m internal allocated capacity , capacity , false ) ; m user data buffer . data = reallocate buffer ( m user data buffer , m internal allocated capacity , capacity , true ) ; m internal allocated capacity = capacity ; } } if ( m count >= m internal allocated capacity ) { return settings . invalid particle index ; } int index = m count ++ ; m flags buffer . data [ index ] = def . flags ; m position buffer . data [ index ] . set ( def . position ) ; m velocity buffer . data [ index ] . set ( def . velocity ) ; m group buffer [ index ] = null ; if ( m depth buffer != null ) { m depth buffer [ index ] = 0 ; } if ( m color buffer . data != null || def . color != null ) { m color buffer . data = request particle buffer ( m color buffer . data class , m color buffer . data ) ; m color buffer . data [ index ] . set ( def . color ) ; } if ( m user data buffer . data != null || def . user data != null ) { m user data buffer . data = request particle buffer ( m user data buffer . data class , m user data buffer . data ) ; m user data buffer . data [ index ] = def . user data ; } if ( m proxy count >= m proxy capacity ) { int old capacity = m proxy capacity ; int new capacity = m proxy count != 0 ? 2 * m proxy count : settings . min particle buffer capacity ; m proxy buffer = buffer utils . reallocate buffer ( proxy . class , m proxy buffer , old capacity , new capacity ) ; m proxy capacity = new capacity ; } m proxy buffer [ m proxy count ++ ] . index = index ; return index ; } <SENTENCE_END/>

(Copy Probability: 3.9%)

<SENTENCE_START> { if ( m count >= m internal allocated capacity ) { int capacity = m count != 0 ? 2 * m count : settings . min particle buffer capacity ; capacity = limit capacity ( capacity , m max count ) ; capacity = limit capacity ( capacity , m flags buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m position buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m velocity buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m color buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m user data buffer . user supplied capacity ) ; if ( m internal allocated capacity < capacity ) { m flags buffer . data = reallocate buffer ( m flags buffer , m internal allocated capacity , capacity , false ) ; m position buffer . data = reallocate buffer ( m position buffer , m internal allocated capacity , capacity , false ) ; m velocity buffer . data = reallocate buffer ( m velocity buffer , m internal allocated capacity , capacity , false ) ; m accumulation buffer = buffer utils . reallocate buffer ( m accumulation buffer , 0 , m internal allocated capacity , capacity , false ) ; m accumulation 2 buffer = buffer utils . reallocate buffer ( vec 2 . class , m accumulation 2 buffer , 0 , m internal allocated capacity , capacity , true ) ; m depth buffer = buffer utils . reallocate buffer ( m depth buffer , 0 , m internal allocated capacity , capacity , true ) ; m color buffer . data = reallocate buffer ( m color buffer , m internal allocated capacity , capacity , true ) ; m group buffer = buffer utils . reallocate buffer ( particle group . class , m group buffer , 0 , m internal allocated capacity , capacity , false ) ; m user data buffer . data = reallocate buffer ( m user data buffer , m internal allocated capacity , capacity , true ) ; m internal allocated capacity = capacity ; } } if ( m count >= m internal allocated capacity ) { return settings . invalid particle index ; } int index = m count ++ ; m flags buffer . data [ index ] = def . flags ; m position buffer . data [ index ] . set ( def . position ) ; m velocity buffer . data [ index ] . set ( def . velocity ) ; m group buffer [ index ] = null ; if ( m depth buffer != null ) { m depth buffer [ index ] = 0 ; } if ( m color buffer . data != null || def . color != null ) { m color buffer . data = request particle buffer ( m color buffer . data class , m color buffer . data ) ; m color buffer . data [ index ] . set ( def . color ) ; } if ( m user data buffer . data != null || def . user data != null ) { m user data buffer . data = request particle buffer ( m user data buffer . data class , m user data buffer . data ) ; m user data buffer . data [ index ] = def . user data ; } if ( m proxy count >= m proxy capacity ) { int old capacity = m proxy capacity ; int new capacity = m proxy count != 0 ? 2 * m proxy count : settings . min particle buffer capacity ; m proxy buffer = buffer utils . reallocate buffer ( proxy . class , m proxy buffer , old capacity , new capacity ) ; m proxy capacity = new capacity ; } m proxy buffer [ m proxy count ++ ] . index = index ; return index ; } <SENTENCE_END/>

particle

<SENTENCE_START> { if ( m count >= m internal allocated capacity ) { int capacity = m count != 0 ? 2 * m count : settings . min particle buffer capacity ; capacity = limit capacity ( capacity , m max count ) ; capacity = limit capacity ( capacity , m flags buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m position buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m velocity buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m color buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m user data buffer . user supplied capacity ) ; if ( m internal allocated capacity < capacity ) { m flags buffer . data = reallocate buffer ( m flags buffer , m internal allocated capacity , capacity , false ) ; m position buffer . data = reallocate buffer ( m position buffer , m internal allocated capacity , capacity , false ) ; m velocity buffer . data = reallocate buffer ( m velocity buffer , m internal allocated capacity , capacity , false ) ; m accumulation buffer = buffer utils . reallocate buffer ( m accumulation buffer , 0 , m internal allocated capacity , capacity , false ) ; m accumulation 2 buffer = buffer utils . reallocate buffer ( vec 2 . class , m accumulation 2 buffer , 0 , m internal allocated capacity , capacity , true ) ; m depth buffer = buffer utils . reallocate buffer ( m depth buffer , 0 , m internal allocated capacity , capacity , true ) ; m color buffer . data = reallocate buffer ( m color buffer , m internal allocated capacity , capacity , true ) ; m group buffer = buffer utils . reallocate buffer ( particle group . class , m group buffer , 0 , m internal allocated capacity , capacity , false ) ; m user data buffer . data = reallocate buffer ( m user data buffer , m internal allocated capacity , capacity , true ) ; m internal allocated capacity = capacity ; } } if ( m count >= m internal allocated capacity ) { return settings . invalid particle index ; } int index = m count ++ ; m flags buffer . data [ index ] = def . flags ; m position buffer . data [ index ] . set ( def . position ) ; m velocity buffer . data [ index ] . set ( def . velocity ) ; m group buffer [ index ] = null ; if ( m depth buffer != null ) { m depth buffer [ index ] = 0 ; } if ( m color buffer . data != null || def . color != null ) { m color buffer . data = request particle buffer ( m color buffer . data class , m color buffer . data ) ; m color buffer . data [ index ] . set ( def . color ) ; } if ( m user data buffer . data != null || def . user data != null ) { m user data buffer . data = request particle buffer ( m user data buffer . data class , m user data buffer . data ) ; m user data buffer . data [ index ] = def . user data ; } if ( m proxy count >= m proxy capacity ) { int old capacity = m proxy capacity ; int new capacity = m proxy count != 0 ? 2 * m proxy count : settings . min particle buffer capacity ; m proxy buffer = buffer utils . reallocate buffer ( proxy . class , m proxy buffer , old capacity , new capacity ) ; m proxy capacity = new capacity ; } m proxy buffer [ m proxy count ++ ] . index = index ; return index ; } <SENTENCE_END/>

(Copy Probability: 17.8%)

<SENTENCE_START> { if ( m count >= m internal allocated capacity ) { int capacity = m count != 0 ? 2 * m count : settings . min particle buffer capacity ; capacity = limit capacity ( capacity , m max count ) ; capacity = limit capacity ( capacity , m flags buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m position buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m velocity buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m color buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m user data buffer . user supplied capacity ) ; if ( m internal allocated capacity < capacity ) { m flags buffer . data = reallocate buffer ( m flags buffer , m internal allocated capacity , capacity , false ) ; m position buffer . data = reallocate buffer ( m position buffer , m internal allocated capacity , capacity , false ) ; m velocity buffer . data = reallocate buffer ( m velocity buffer , m internal allocated capacity , capacity , false ) ; m accumulation buffer = buffer utils . reallocate buffer ( m accumulation buffer , 0 , m internal allocated capacity , capacity , false ) ; m accumulation 2 buffer = buffer utils . reallocate buffer ( vec 2 . class , m accumulation 2 buffer , 0 , m internal allocated capacity , capacity , true ) ; m depth buffer = buffer utils . reallocate buffer ( m depth buffer , 0 , m internal allocated capacity , capacity , true ) ; m color buffer . data = reallocate buffer ( m color buffer , m internal allocated capacity , capacity , true ) ; m group buffer = buffer utils . reallocate buffer ( particle group . class , m group buffer , 0 , m internal allocated capacity , capacity , false ) ; m user data buffer . data = reallocate buffer ( m user data buffer , m internal allocated capacity , capacity , true ) ; m internal allocated capacity = capacity ; } } if ( m count >= m internal allocated capacity ) { return settings . invalid particle index ; } int index = m count ++ ; m flags buffer . data [ index ] = def . flags ; m position buffer . data [ index ] . set ( def . position ) ; m velocity buffer . data [ index ] . set ( def . velocity ) ; m group buffer [ index ] = null ; if ( m depth buffer != null ) { m depth buffer [ index ] = 0 ; } if ( m color buffer . data != null || def . color != null ) { m color buffer . data = request particle buffer ( m color buffer . data class , m color buffer . data ) ; m color buffer . data [ index ] . set ( def . color ) ; } if ( m user data buffer . data != null || def . user data != null ) { m user data buffer . data = request particle buffer ( m user data buffer . data class , m user data buffer . data ) ; m user data buffer . data [ index ] = def . user data ; } if ( m proxy count >= m proxy capacity ) { int old capacity = m proxy capacity ; int new capacity = m proxy count != 0 ? 2 * m proxy count : settings . min particle buffer capacity ; m proxy buffer = buffer utils . reallocate buffer ( proxy . class , m proxy buffer , old capacity , new capacity ) ; m proxy capacity = new capacity ; } m proxy buffer [ m proxy count ++ ] . index = index ; return index ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( m count >= m internal allocated capacity ) { int capacity = m count != 0 ? 2 * m count : settings . min particle buffer capacity ; capacity = limit capacity ( capacity , m max count ) ; capacity = limit capacity ( capacity , m flags buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m position buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m velocity buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m color buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m user data buffer . user supplied capacity ) ; if ( m internal allocated capacity < capacity ) { m flags buffer . data = reallocate buffer ( m flags buffer , m internal allocated capacity , capacity , false ) ; m position buffer . data = reallocate buffer ( m position buffer , m internal allocated capacity , capacity , false ) ; m velocity buffer . data = reallocate buffer ( m velocity buffer , m internal allocated capacity , capacity , false ) ; m accumulation buffer = buffer utils . reallocate buffer ( m accumulation buffer , 0 , m internal allocated capacity , capacity , false ) ; m accumulation 2 buffer = buffer utils . reallocate buffer ( vec 2 . class , m accumulation 2 buffer , 0 , m internal allocated capacity , capacity , true ) ; m depth buffer = buffer utils . reallocate buffer ( m depth buffer , 0 , m internal allocated capacity , capacity , true ) ; m color buffer . data = reallocate buffer ( m color buffer , m internal allocated capacity , capacity , true ) ; m group buffer = buffer utils . reallocate buffer ( particle group . class , m group buffer , 0 , m internal allocated capacity , capacity , false ) ; m user data buffer . data = reallocate buffer ( m user data buffer , m internal allocated capacity , capacity , true ) ; m internal allocated capacity = capacity ; } } if ( m count >= m internal allocated capacity ) { return settings . invalid particle index ; } int index = m count ++ ; m flags buffer . data [ index ] = def . flags ; m position buffer . data [ index ] . set ( def . position ) ; m velocity buffer . data [ index ] . set ( def . velocity ) ; m group buffer [ index ] = null ; if ( m depth buffer != null ) { m depth buffer [ index ] = 0 ; } if ( m color buffer . data != null || def . color != null ) { m color buffer . data = request particle buffer ( m color buffer . data class , m color buffer . data ) ; m color buffer . data [ index ] . set ( def . color ) ; } if ( m user data buffer . data != null || def . user data != null ) { m user data buffer . data = request particle buffer ( m user data buffer . data class , m user data buffer . data ) ; m user data buffer . data [ index ] = def . user data ; } if ( m proxy count >= m proxy capacity ) { int old capacity = m proxy capacity ; int new capacity = m proxy count != 0 ? 2 * m proxy count : settings . min particle buffer capacity ; m proxy buffer = buffer utils . reallocate buffer ( proxy . class , m proxy buffer , old capacity , new capacity ) ; m proxy capacity = new capacity ; } m proxy buffer [ m proxy count ++ ] . index = index ; return index ; } <SENTENCE_END/>

(Copy Probability: 16.9%)

<SENTENCE_START> { if ( m count >= m internal allocated capacity ) { int capacity = m count != 0 ? 2 * m count : settings . min particle buffer capacity ; capacity = limit capacity ( capacity , m max count ) ; capacity = limit capacity ( capacity , m flags buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m position buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m velocity buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m color buffer . user supplied capacity ) ; capacity = limit capacity ( capacity , m user data buffer . user supplied capacity ) ; if ( m internal allocated capacity < capacity ) { m flags buffer . data = reallocate buffer ( m flags buffer , m internal allocated capacity , capacity , false ) ; m position buffer . data = reallocate buffer ( m position buffer , m internal allocated capacity , capacity , false ) ; m velocity buffer . data = reallocate buffer ( m velocity buffer , m internal allocated capacity , capacity , false ) ; m accumulation buffer = buffer utils . reallocate buffer ( m accumulation buffer , 0 , m internal allocated capacity , capacity , false ) ; m accumulation 2 buffer = buffer utils . reallocate buffer ( vec 2 . class , m accumulation 2 buffer , 0 , m internal allocated capacity , capacity , true ) ; m depth buffer = buffer utils . reallocate buffer ( m depth buffer , 0 , m internal allocated capacity , capacity , true ) ; m color buffer . data = reallocate buffer ( m color buffer , m internal allocated capacity , capacity , true ) ; m group buffer = buffer utils . reallocate buffer ( particle group . class , m group buffer , 0 , m internal allocated capacity , capacity , false ) ; m user data buffer . data = reallocate buffer ( m user data buffer , m internal allocated capacity , capacity , true ) ; m internal allocated capacity = capacity ; } } if ( m count >= m internal allocated capacity ) { return settings . invalid particle index ; } int index = m count ++ ; m flags buffer . data [ index ] = def . flags ; m position buffer . data [ index ] . set ( def . position ) ; m velocity buffer . data [ index ] . set ( def . velocity ) ; m group buffer [ index ] = null ; if ( m depth buffer != null ) { m depth buffer [ index ] = 0 ; } if ( m color buffer . data != null || def . color != null ) { m color buffer . data = request particle buffer ( m color buffer . data class , m color buffer . data ) ; m color buffer . data [ index ] . set ( def . color ) ; } if ( m user data buffer . data != null || def . user data != null ) { m user data buffer . data = request particle buffer ( m user data buffer . data class , m user data buffer . data ) ; m user data buffer . data [ index ] = def . user data ; } if ( m proxy count >= m proxy capacity ) { int old capacity = m proxy capacity ; int new capacity = m proxy count != 0 ? 2 * m proxy count : settings . min particle buffer capacity ; m proxy buffer = buffer utils . reallocate buffer ( proxy . class , m proxy buffer , old capacity , new capacity ) ; m proxy capacity = new capacity ; } m proxy buffer [ m proxy count ++ ] . index = index ; return index ; } <SENTENCE_END/>


Original Name destroy,particle

destroy

<SENTENCE_START> { int flags = particle type . b 2 zombie particle ; if ( call destruction listener ) { flags |= particle type . b 2 destruction listener ; } m flags buffer . data [ index ] |= flags ; } <SENTENCE_END/>

(Copy Probability: 2.2%)

<SENTENCE_START> { int flags = particle type . b 2 zombie particle ; if ( call destruction listener ) { flags |= particle type . b 2 destruction listener ; } m flags buffer . data [ index ] |= flags ; } <SENTENCE_END/>

particle

<SENTENCE_START> { int flags = particle type . b 2 zombie particle ; if ( call destruction listener ) { flags |= particle type . b 2 destruction listener ; } m flags buffer . data [ index ] |= flags ; } <SENTENCE_END/>

(Copy Probability: 23.5%)

<SENTENCE_START> { int flags = particle type . b 2 zombie particle ; if ( call destruction listener ) { flags |= particle type . b 2 destruction listener ; } m flags buffer . data [ index ] |= flags ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int flags = particle type . b 2 zombie particle ; if ( call destruction listener ) { flags |= particle type . b 2 destruction listener ; } m flags buffer . data [ index ] |= flags ; } <SENTENCE_END/>

(Copy Probability: 6.3%)

<SENTENCE_START> { int flags = particle type . b 2 zombie particle ; if ( call destruction listener ) { flags |= particle type . b 2 destruction listener ; } m flags buffer . data [ index ] |= flags ; } <SENTENCE_END/>
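
A de-tokenized reading of the destroy particle body above, again with reconstructed jbox2d-style identifiers that are assumptions rather than the original spelling: the particle is not removed immediately, it is only flagged as a zombie and reclaimed during the next solve step.

    // Sketch of the tokenized body above (identifiers are reconstructed assumptions).
    void destroyParticle(int index, boolean callDestructionListener) {
      int flags = ParticleType.b2_zombieParticle;
      if (callDestructionListener) {
        flags |= ParticleType.b2_destructionListener;
      }
      m_flagsBuffer.data[index] |= flags;
    }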


Original Name destroy,particles,in,shape

destroy

<SENTENCE_START> { dpcallback . init ( this , shape , xf , call destruction listener ) ; shape . compute aabb ( temp , xf , 0 ) ; m world . query aabb ( dpcallback , temp ) ; return dpcallback . destroyed ; } <SENTENCE_END/>

(Copy Probability: 6.6%)

<SENTENCE_START> { dpcallback . init ( this , shape , xf , call destruction listener ) ; shape . compute aabb ( temp , xf , 0 ) ; m world . query aabb ( dpcallback , temp ) ; return dpcallback . destroyed ; } <SENTENCE_END/>

particles

<SENTENCE_START> { dpcallback . init ( this , shape , xf , call destruction listener ) ; shape . compute aabb ( temp , xf , 0 ) ; m world . query aabb ( dpcallback , temp ) ; return dpcallback . destroyed ; } <SENTENCE_END/>

(Copy Probability: 56.8%)

<SENTENCE_START> { dpcallback . init ( this , shape , xf , call destruction listener ) ; shape . compute aabb ( temp , xf , 0 ) ; m world . query aabb ( dpcallback , temp ) ; return dpcallback . destroyed ; } <SENTENCE_END/>

in

<SENTENCE_START> { dpcallback . init ( this , shape , xf , call destruction listener ) ; shape . compute aabb ( temp , xf , 0 ) ; m world . query aabb ( dpcallback , temp ) ; return dpcallback . destroyed ; } <SENTENCE_END/>

(Copy Probability: 15.7%)

<SENTENCE_START> { dpcallback . init ( this , shape , xf , call destruction listener ) ; shape . compute aabb ( temp , xf , 0 ) ; m world . query aabb ( dpcallback , temp ) ; return dpcallback . destroyed ; } <SENTENCE_END/>

shape

<SENTENCE_START> { dpcallback . init ( this , shape , xf , call destruction listener ) ; shape . compute aabb ( temp , xf , 0 ) ; m world . query aabb ( dpcallback , temp ) ; return dpcallback . destroyed ; } <SENTENCE_END/>

(Copy Probability: 5.0%)

<SENTENCE_START> { dpcallback . init ( this , shape , xf , call destruction listener ) ; shape . compute aabb ( temp , xf , 0 ) ; m world . query aabb ( dpcallback , temp ) ; return dpcallback . destroyed ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { dpcallback . init ( this , shape , xf , call destruction listener ) ; shape . compute aabb ( temp , xf , 0 ) ; m world . query aabb ( dpcallback , temp ) ; return dpcallback . destroyed ; } <SENTENCE_END/>

(Copy Probability: 7.7%)

<SENTENCE_START> { dpcallback . init ( this , shape , xf , call destruction listener ) ; shape . compute aabb ( temp , xf , 0 ) ; m world . query aabb ( dpcallback , temp ) ; return dpcallback . destroyed ; } <SENTENCE_END/>
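
A de-tokenized reading of the destroy particles in shape body above (identifiers reconstructed, so treat them as assumptions): the shape's AABB is used to query the world, and the callback flags every particle that actually lies inside the shape.

    // Sketch of the tokenized body above (identifiers are reconstructed assumptions).
    int destroyParticlesInShape(Shape shape, Transform xf, boolean callDestructionListener) {
      dpcallback.init(this, shape, xf, callDestructionListener);
      shape.computeAABB(temp, xf, 0);
      m_world.queryAABB(dpcallback, temp);
      return dpcallback.destroyed;  // number of particles flagged for destruction
    }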


Original Name destroy,particles,in,group

destroy

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { destroy particle ( i , call destruction listener ) ; } } <SENTENCE_END/>

(Copy Probability: 3.1%)

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { destroy particle ( i , call destruction listener ) ; } } <SENTENCE_END/>

particles

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { destroy particle ( i , call destruction listener ) ; } } <SENTENCE_END/>

(Copy Probability: 45.2%)

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { destroy particle ( i , call destruction listener ) ; } } <SENTENCE_END/>

in

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { destroy particle ( i , call destruction listener ) ; } } <SENTENCE_END/>

(Copy Probability: 8.6%)

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { destroy particle ( i , call destruction listener ) ; } } <SENTENCE_END/>

group

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { destroy particle ( i , call destruction listener ) ; } } <SENTENCE_END/>

(Copy Probability: 4.6%)

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { destroy particle ( i , call destruction listener ) ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { destroy particle ( i , call destruction listener ) ; } } <SENTENCE_END/>

(Copy Probability: 7.7%)

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { destroy particle ( i , call destruction listener ) ; } } <SENTENCE_END/>
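
A de-tokenized reading of the destroy particles in group body above (reconstructed identifiers, assumptions as before): every particle in the group's contiguous index range is flagged for destruction via destroyParticle.

    // Sketch of the tokenized body above (identifiers are reconstructed assumptions).
    void destroyParticlesInGroup(ParticleGroup group, boolean callDestructionListener) {
      for (int i = group.m_firstIndex; i < group.m_lastIndex; i++) {
        destroyParticle(i, callDestructionListener);
      }
    }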


Original Name create,particle,group

create

<SENTENCE_START> { float stride = get particle stride ( ) ; final transform identity = temp transform ; identity . set identity ( ) ; transform transform = temp transform 2 ; transform . set identity ( ) ; int first index = m count ; if ( group def . shape != null ) { final particle def particle def = temp particle def ; particle def . flags = group def . flags ; particle def . color = group def . color ; particle def . user data = group def . user data ; shape shape = group def . shape ; transform . set ( group def . position , group def . angle ) ; aabb aabb = temp ; int child count = shape . get child count ( ) ; for ( int child index = 0 ; child index < child count ; child index ++ ) { if ( child index == 0 ) { shape . compute aabb ( aabb , identity , child index ) ; } else { aabb child aabb = temp 2 ; shape . compute aabb ( child aabb , identity , child index ) ; aabb . combine ( child aabb ) ; } } final float upper bound y = aabb . upper bound . y ; final float upper bound x = aabb . upper bound . x ; for ( float y = math utils . floor ( aabb . lower bound . y / stride ) * stride ; y < upper bound y ; y += stride ) { for ( float x = math utils . floor ( aabb . lower bound . x / stride ) * stride ; x < upper bound x ; x += stride ) { vec 2 p = temp vec ; p . x = x ; p . y = y ; if ( shape . test point ( identity , p ) ) { transform . mul to out ( transform , p , p ) ; particle def . position . x = p . x ; particle def . position . y = p . y ; p . sub local ( group def . position ) ; vec 2 . cross to out unsafe ( group def . angular velocity , p , particle def . velocity ) ; particle def . velocity . add local ( group def . linear velocity ) ; create particle ( particle def ) ; } } } } int last index = m count ; particle group group = new particle group ( ) ; group . m system = this ; group . m first index = first index ; group . m last index = last index ; group . m group flags = group def . group flags ; group . m strength = group def . strength ; group . m user data = group def . user data ; group . m transform . set ( transform ) ; group . m destroy automatically = group def . destroy automatically ; group . m prev = null ; group . m next = m group list ; if ( m group list != null ) { m group list . m prev = group ; } m group list = group ; ++ m group count ; for ( int i = first index ; i < last index ; i ++ ) { m group buffer [ i ] = group ; } update contacts ( true ) ; if ( ( group def . flags & k pair flags ) != 0 ) { for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a > b ) { int temp = a ; a = b ; b = temp ; } if ( first index <= a && b < last index ) { if ( m pair count >= m pair capacity ) { int old capacity = m pair capacity ; int new capacity = m pair count != 0 ? 2 * m pair count : settings . min particle buffer capacity ; m pair buffer = buffer utils . reallocate buffer ( pair . class , m pair buffer , old capacity , new capacity ) ; m pair capacity = new capacity ; } pair pair = m pair buffer [ m pair count ] ; pair . index a = a ; pair . index b = b ; pair . flags = contact . flags ; pair . strength = group def . strength ; pair . distance = math utils . distance ( m position buffer . data [ a ] , m position buffer . data [ b ] ) ; m pair count ++ ; } } } if ( ( group def . flags & k triad flags ) != 0 ) { voronoi diagram diagram = new voronoi diagram ( last index - first index ) ; for ( int i = first index ; i < last index ; i ++ ) { diagram . 
add generator ( m position buffer . data [ i ] , i ) ; } diagram . generate ( stride / 2 ) ; create particle group callback . system = this ; create particle group callback . def = group def ; create particle group callback . first index = first index ; diagram . get nodes ( create particle group callback ) ; } if ( ( group def . group flags & particle group type . b 2 solid particle group ) != 0 ) { compute depth for group ( group ) ; } return group ; } <SENTENCE_END/>

(Copy Probability: 3.7%)

<SENTENCE_START> { float stride = get particle stride ( ) ; final transform identity = temp transform ; identity . set identity ( ) ; transform transform = temp transform 2 ; transform . set identity ( ) ; int first index = m count ; if ( group def . shape != null ) { final particle def particle def = temp particle def ; particle def . flags = group def . flags ; particle def . color = group def . color ; particle def . user data = group def . user data ; shape shape = group def . shape ; transform . set ( group def . position , group def . angle ) ; aabb aabb = temp ; int child count = shape . get child count ( ) ; for ( int child index = 0 ; child index < child count ; child index ++ ) { if ( child index == 0 ) { shape . compute aabb ( aabb , identity , child index ) ; } else { aabb child aabb = temp 2 ; shape . compute aabb ( child aabb , identity , child index ) ; aabb . combine ( child aabb ) ; } } final float upper bound y = aabb . upper bound . y ; final float upper bound x = aabb . upper bound . x ; for ( float y = math utils . floor ( aabb . lower bound . y / stride ) * stride ; y < upper bound y ; y += stride ) { for ( float x = math utils . floor ( aabb . lower bound . x / stride ) * stride ; x < upper bound x ; x += stride ) { vec 2 p = temp vec ; p . x = x ; p . y = y ; if ( shape . test point ( identity , p ) ) { transform . mul to out ( transform , p , p ) ; particle def . position . x = p . x ; particle def . position . y = p . y ; p . sub local ( group def . position ) ; vec 2 . cross to out unsafe ( group def . angular velocity , p , particle def . velocity ) ; particle def . velocity . add local ( group def . linear velocity ) ; create particle ( particle def ) ; } } } } int last index = m count ; particle group group = new particle group ( ) ; group . m system = this ; group . m first index = first index ; group . m last index = last index ; group . m group flags = group def . group flags ; group . m strength = group def . strength ; group . m user data = group def . user data ; group . m transform . set ( transform ) ; group . m destroy automatically = group def . destroy automatically ; group . m prev = null ; group . m next = m group list ; if ( m group list != null ) { m group list . m prev = group ; } m group list = group ; ++ m group count ; for ( int i = first index ; i < last index ; i ++ ) { m group buffer [ i ] = group ; } update contacts ( true ) ; if ( ( group def . flags & k pair flags ) != 0 ) { for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a > b ) { int temp = a ; a = b ; b = temp ; } if ( first index <= a && b < last index ) { if ( m pair count >= m pair capacity ) { int old capacity = m pair capacity ; int new capacity = m pair count != 0 ? 2 * m pair count : settings . min particle buffer capacity ; m pair buffer = buffer utils . reallocate buffer ( pair . class , m pair buffer , old capacity , new capacity ) ; m pair capacity = new capacity ; } pair pair = m pair buffer [ m pair count ] ; pair . index a = a ; pair . index b = b ; pair . flags = contact . flags ; pair . strength = group def . strength ; pair . distance = math utils . distance ( m position buffer . data [ a ] , m position buffer . data [ b ] ) ; m pair count ++ ; } } } if ( ( group def . flags & k triad flags ) != 0 ) { voronoi diagram diagram = new voronoi diagram ( last index - first index ) ; for ( int i = first index ; i < last index ; i ++ ) { diagram . 
add generator ( m position buffer . data [ i ] , i ) ; } diagram . generate ( stride / 2 ) ; create particle group callback . system = this ; create particle group callback . def = group def ; create particle group callback . first index = first index ; diagram . get nodes ( create particle group callback ) ; } if ( ( group def . group flags & particle group type . b 2 solid particle group ) != 0 ) { compute depth for group ( group ) ; } return group ; } <SENTENCE_END/>

particle

<SENTENCE_START> { float stride = get particle stride ( ) ; final transform identity = temp transform ; identity . set identity ( ) ; transform transform = temp transform 2 ; transform . set identity ( ) ; int first index = m count ; if ( group def . shape != null ) { final particle def particle def = temp particle def ; particle def . flags = group def . flags ; particle def . color = group def . color ; particle def . user data = group def . user data ; shape shape = group def . shape ; transform . set ( group def . position , group def . angle ) ; aabb aabb = temp ; int child count = shape . get child count ( ) ; for ( int child index = 0 ; child index < child count ; child index ++ ) { if ( child index == 0 ) { shape . compute aabb ( aabb , identity , child index ) ; } else { aabb child aabb = temp 2 ; shape . compute aabb ( child aabb , identity , child index ) ; aabb . combine ( child aabb ) ; } } final float upper bound y = aabb . upper bound . y ; final float upper bound x = aabb . upper bound . x ; for ( float y = math utils . floor ( aabb . lower bound . y / stride ) * stride ; y < upper bound y ; y += stride ) { for ( float x = math utils . floor ( aabb . lower bound . x / stride ) * stride ; x < upper bound x ; x += stride ) { vec 2 p = temp vec ; p . x = x ; p . y = y ; if ( shape . test point ( identity , p ) ) { transform . mul to out ( transform , p , p ) ; particle def . position . x = p . x ; particle def . position . y = p . y ; p . sub local ( group def . position ) ; vec 2 . cross to out unsafe ( group def . angular velocity , p , particle def . velocity ) ; particle def . velocity . add local ( group def . linear velocity ) ; create particle ( particle def ) ; } } } } int last index = m count ; particle group group = new particle group ( ) ; group . m system = this ; group . m first index = first index ; group . m last index = last index ; group . m group flags = group def . group flags ; group . m strength = group def . strength ; group . m user data = group def . user data ; group . m transform . set ( transform ) ; group . m destroy automatically = group def . destroy automatically ; group . m prev = null ; group . m next = m group list ; if ( m group list != null ) { m group list . m prev = group ; } m group list = group ; ++ m group count ; for ( int i = first index ; i < last index ; i ++ ) { m group buffer [ i ] = group ; } update contacts ( true ) ; if ( ( group def . flags & k pair flags ) != 0 ) { for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a > b ) { int temp = a ; a = b ; b = temp ; } if ( first index <= a && b < last index ) { if ( m pair count >= m pair capacity ) { int old capacity = m pair capacity ; int new capacity = m pair count != 0 ? 2 * m pair count : settings . min particle buffer capacity ; m pair buffer = buffer utils . reallocate buffer ( pair . class , m pair buffer , old capacity , new capacity ) ; m pair capacity = new capacity ; } pair pair = m pair buffer [ m pair count ] ; pair . index a = a ; pair . index b = b ; pair . flags = contact . flags ; pair . strength = group def . strength ; pair . distance = math utils . distance ( m position buffer . data [ a ] , m position buffer . data [ b ] ) ; m pair count ++ ; } } } if ( ( group def . flags & k triad flags ) != 0 ) { voronoi diagram diagram = new voronoi diagram ( last index - first index ) ; for ( int i = first index ; i < last index ; i ++ ) { diagram . 
add generator ( m position buffer . data [ i ] , i ) ; } diagram . generate ( stride / 2 ) ; create particle group callback . system = this ; create particle group callback . def = group def ; create particle group callback . first index = first index ; diagram . get nodes ( create particle group callback ) ; } if ( ( group def . group flags & particle group type . b 2 solid particle group ) != 0 ) { compute depth for group ( group ) ; } return group ; } <SENTENCE_END/>

(Copy Probability: 12.9%)

<SENTENCE_START> { float stride = get particle stride ( ) ; final transform identity = temp transform ; identity . set identity ( ) ; transform transform = temp transform 2 ; transform . set identity ( ) ; int first index = m count ; if ( group def . shape != null ) { final particle def particle def = temp particle def ; particle def . flags = group def . flags ; particle def . color = group def . color ; particle def . user data = group def . user data ; shape shape = group def . shape ; transform . set ( group def . position , group def . angle ) ; aabb aabb = temp ; int child count = shape . get child count ( ) ; for ( int child index = 0 ; child index < child count ; child index ++ ) { if ( child index == 0 ) { shape . compute aabb ( aabb , identity , child index ) ; } else { aabb child aabb = temp 2 ; shape . compute aabb ( child aabb , identity , child index ) ; aabb . combine ( child aabb ) ; } } final float upper bound y = aabb . upper bound . y ; final float upper bound x = aabb . upper bound . x ; for ( float y = math utils . floor ( aabb . lower bound . y / stride ) * stride ; y < upper bound y ; y += stride ) { for ( float x = math utils . floor ( aabb . lower bound . x / stride ) * stride ; x < upper bound x ; x += stride ) { vec 2 p = temp vec ; p . x = x ; p . y = y ; if ( shape . test point ( identity , p ) ) { transform . mul to out ( transform , p , p ) ; particle def . position . x = p . x ; particle def . position . y = p . y ; p . sub local ( group def . position ) ; vec 2 . cross to out unsafe ( group def . angular velocity , p , particle def . velocity ) ; particle def . velocity . add local ( group def . linear velocity ) ; create particle ( particle def ) ; } } } } int last index = m count ; particle group group = new particle group ( ) ; group . m system = this ; group . m first index = first index ; group . m last index = last index ; group . m group flags = group def . group flags ; group . m strength = group def . strength ; group . m user data = group def . user data ; group . m transform . set ( transform ) ; group . m destroy automatically = group def . destroy automatically ; group . m prev = null ; group . m next = m group list ; if ( m group list != null ) { m group list . m prev = group ; } m group list = group ; ++ m group count ; for ( int i = first index ; i < last index ; i ++ ) { m group buffer [ i ] = group ; } update contacts ( true ) ; if ( ( group def . flags & k pair flags ) != 0 ) { for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a > b ) { int temp = a ; a = b ; b = temp ; } if ( first index <= a && b < last index ) { if ( m pair count >= m pair capacity ) { int old capacity = m pair capacity ; int new capacity = m pair count != 0 ? 2 * m pair count : settings . min particle buffer capacity ; m pair buffer = buffer utils . reallocate buffer ( pair . class , m pair buffer , old capacity , new capacity ) ; m pair capacity = new capacity ; } pair pair = m pair buffer [ m pair count ] ; pair . index a = a ; pair . index b = b ; pair . flags = contact . flags ; pair . strength = group def . strength ; pair . distance = math utils . distance ( m position buffer . data [ a ] , m position buffer . data [ b ] ) ; m pair count ++ ; } } } if ( ( group def . flags & k triad flags ) != 0 ) { voronoi diagram diagram = new voronoi diagram ( last index - first index ) ; for ( int i = first index ; i < last index ; i ++ ) { diagram . 
add generator ( m position buffer . data [ i ] , i ) ; } diagram . generate ( stride / 2 ) ; create particle group callback . system = this ; create particle group callback . def = group def ; create particle group callback . first index = first index ; diagram . get nodes ( create particle group callback ) ; } if ( ( group def . group flags & particle group type . b 2 solid particle group ) != 0 ) { compute depth for group ( group ) ; } return group ; } <SENTENCE_END/>

group

<SENTENCE_START> { float stride = get particle stride ( ) ; final transform identity = temp transform ; identity . set identity ( ) ; transform transform = temp transform 2 ; transform . set identity ( ) ; int first index = m count ; if ( group def . shape != null ) { final particle def particle def = temp particle def ; particle def . flags = group def . flags ; particle def . color = group def . color ; particle def . user data = group def . user data ; shape shape = group def . shape ; transform . set ( group def . position , group def . angle ) ; aabb aabb = temp ; int child count = shape . get child count ( ) ; for ( int child index = 0 ; child index < child count ; child index ++ ) { if ( child index == 0 ) { shape . compute aabb ( aabb , identity , child index ) ; } else { aabb child aabb = temp 2 ; shape . compute aabb ( child aabb , identity , child index ) ; aabb . combine ( child aabb ) ; } } final float upper bound y = aabb . upper bound . y ; final float upper bound x = aabb . upper bound . x ; for ( float y = math utils . floor ( aabb . lower bound . y / stride ) * stride ; y < upper bound y ; y += stride ) { for ( float x = math utils . floor ( aabb . lower bound . x / stride ) * stride ; x < upper bound x ; x += stride ) { vec 2 p = temp vec ; p . x = x ; p . y = y ; if ( shape . test point ( identity , p ) ) { transform . mul to out ( transform , p , p ) ; particle def . position . x = p . x ; particle def . position . y = p . y ; p . sub local ( group def . position ) ; vec 2 . cross to out unsafe ( group def . angular velocity , p , particle def . velocity ) ; particle def . velocity . add local ( group def . linear velocity ) ; create particle ( particle def ) ; } } } } int last index = m count ; particle group group = new particle group ( ) ; group . m system = this ; group . m first index = first index ; group . m last index = last index ; group . m group flags = group def . group flags ; group . m strength = group def . strength ; group . m user data = group def . user data ; group . m transform . set ( transform ) ; group . m destroy automatically = group def . destroy automatically ; group . m prev = null ; group . m next = m group list ; if ( m group list != null ) { m group list . m prev = group ; } m group list = group ; ++ m group count ; for ( int i = first index ; i < last index ; i ++ ) { m group buffer [ i ] = group ; } update contacts ( true ) ; if ( ( group def . flags & k pair flags ) != 0 ) { for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a > b ) { int temp = a ; a = b ; b = temp ; } if ( first index <= a && b < last index ) { if ( m pair count >= m pair capacity ) { int old capacity = m pair capacity ; int new capacity = m pair count != 0 ? 2 * m pair count : settings . min particle buffer capacity ; m pair buffer = buffer utils . reallocate buffer ( pair . class , m pair buffer , old capacity , new capacity ) ; m pair capacity = new capacity ; } pair pair = m pair buffer [ m pair count ] ; pair . index a = a ; pair . index b = b ; pair . flags = contact . flags ; pair . strength = group def . strength ; pair . distance = math utils . distance ( m position buffer . data [ a ] , m position buffer . data [ b ] ) ; m pair count ++ ; } } } if ( ( group def . flags & k triad flags ) != 0 ) { voronoi diagram diagram = new voronoi diagram ( last index - first index ) ; for ( int i = first index ; i < last index ; i ++ ) { diagram . 
add generator ( m position buffer . data [ i ] , i ) ; } diagram . generate ( stride / 2 ) ; create particle group callback . system = this ; create particle group callback . def = group def ; create particle group callback . first index = first index ; diagram . get nodes ( create particle group callback ) ; } if ( ( group def . group flags & particle group type . b 2 solid particle group ) != 0 ) { compute depth for group ( group ) ; } return group ; } <SENTENCE_END/>

(Copy Probability: 9.6%)

<SENTENCE_START> { float stride = get particle stride ( ) ; final transform identity = temp transform ; identity . set identity ( ) ; transform transform = temp transform 2 ; transform . set identity ( ) ; int first index = m count ; if ( group def . shape != null ) { final particle def particle def = temp particle def ; particle def . flags = group def . flags ; particle def . color = group def . color ; particle def . user data = group def . user data ; shape shape = group def . shape ; transform . set ( group def . position , group def . angle ) ; aabb aabb = temp ; int child count = shape . get child count ( ) ; for ( int child index = 0 ; child index < child count ; child index ++ ) { if ( child index == 0 ) { shape . compute aabb ( aabb , identity , child index ) ; } else { aabb child aabb = temp 2 ; shape . compute aabb ( child aabb , identity , child index ) ; aabb . combine ( child aabb ) ; } } final float upper bound y = aabb . upper bound . y ; final float upper bound x = aabb . upper bound . x ; for ( float y = math utils . floor ( aabb . lower bound . y / stride ) * stride ; y < upper bound y ; y += stride ) { for ( float x = math utils . floor ( aabb . lower bound . x / stride ) * stride ; x < upper bound x ; x += stride ) { vec 2 p = temp vec ; p . x = x ; p . y = y ; if ( shape . test point ( identity , p ) ) { transform . mul to out ( transform , p , p ) ; particle def . position . x = p . x ; particle def . position . y = p . y ; p . sub local ( group def . position ) ; vec 2 . cross to out unsafe ( group def . angular velocity , p , particle def . velocity ) ; particle def . velocity . add local ( group def . linear velocity ) ; create particle ( particle def ) ; } } } } int last index = m count ; particle group group = new particle group ( ) ; group . m system = this ; group . m first index = first index ; group . m last index = last index ; group . m group flags = group def . group flags ; group . m strength = group def . strength ; group . m user data = group def . user data ; group . m transform . set ( transform ) ; group . m destroy automatically = group def . destroy automatically ; group . m prev = null ; group . m next = m group list ; if ( m group list != null ) { m group list . m prev = group ; } m group list = group ; ++ m group count ; for ( int i = first index ; i < last index ; i ++ ) { m group buffer [ i ] = group ; } update contacts ( true ) ; if ( ( group def . flags & k pair flags ) != 0 ) { for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a > b ) { int temp = a ; a = b ; b = temp ; } if ( first index <= a && b < last index ) { if ( m pair count >= m pair capacity ) { int old capacity = m pair capacity ; int new capacity = m pair count != 0 ? 2 * m pair count : settings . min particle buffer capacity ; m pair buffer = buffer utils . reallocate buffer ( pair . class , m pair buffer , old capacity , new capacity ) ; m pair capacity = new capacity ; } pair pair = m pair buffer [ m pair count ] ; pair . index a = a ; pair . index b = b ; pair . flags = contact . flags ; pair . strength = group def . strength ; pair . distance = math utils . distance ( m position buffer . data [ a ] , m position buffer . data [ b ] ) ; m pair count ++ ; } } } if ( ( group def . flags & k triad flags ) != 0 ) { voronoi diagram diagram = new voronoi diagram ( last index - first index ) ; for ( int i = first index ; i < last index ; i ++ ) { diagram . 
add generator ( m position buffer . data [ i ] , i ) ; } diagram . generate ( stride / 2 ) ; create particle group callback . system = this ; create particle group callback . def = group def ; create particle group callback . first index = first index ; diagram . get nodes ( create particle group callback ) ; } if ( ( group def . group flags & particle group type . b 2 solid particle group ) != 0 ) { compute depth for group ( group ) ; } return group ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { float stride = get particle stride ( ) ; final transform identity = temp transform ; identity . set identity ( ) ; transform transform = temp transform 2 ; transform . set identity ( ) ; int first index = m count ; if ( group def . shape != null ) { final particle def particle def = temp particle def ; particle def . flags = group def . flags ; particle def . color = group def . color ; particle def . user data = group def . user data ; shape shape = group def . shape ; transform . set ( group def . position , group def . angle ) ; aabb aabb = temp ; int child count = shape . get child count ( ) ; for ( int child index = 0 ; child index < child count ; child index ++ ) { if ( child index == 0 ) { shape . compute aabb ( aabb , identity , child index ) ; } else { aabb child aabb = temp 2 ; shape . compute aabb ( child aabb , identity , child index ) ; aabb . combine ( child aabb ) ; } } final float upper bound y = aabb . upper bound . y ; final float upper bound x = aabb . upper bound . x ; for ( float y = math utils . floor ( aabb . lower bound . y / stride ) * stride ; y < upper bound y ; y += stride ) { for ( float x = math utils . floor ( aabb . lower bound . x / stride ) * stride ; x < upper bound x ; x += stride ) { vec 2 p = temp vec ; p . x = x ; p . y = y ; if ( shape . test point ( identity , p ) ) { transform . mul to out ( transform , p , p ) ; particle def . position . x = p . x ; particle def . position . y = p . y ; p . sub local ( group def . position ) ; vec 2 . cross to out unsafe ( group def . angular velocity , p , particle def . velocity ) ; particle def . velocity . add local ( group def . linear velocity ) ; create particle ( particle def ) ; } } } } int last index = m count ; particle group group = new particle group ( ) ; group . m system = this ; group . m first index = first index ; group . m last index = last index ; group . m group flags = group def . group flags ; group . m strength = group def . strength ; group . m user data = group def . user data ; group . m transform . set ( transform ) ; group . m destroy automatically = group def . destroy automatically ; group . m prev = null ; group . m next = m group list ; if ( m group list != null ) { m group list . m prev = group ; } m group list = group ; ++ m group count ; for ( int i = first index ; i < last index ; i ++ ) { m group buffer [ i ] = group ; } update contacts ( true ) ; if ( ( group def . flags & k pair flags ) != 0 ) { for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a > b ) { int temp = a ; a = b ; b = temp ; } if ( first index <= a && b < last index ) { if ( m pair count >= m pair capacity ) { int old capacity = m pair capacity ; int new capacity = m pair count != 0 ? 2 * m pair count : settings . min particle buffer capacity ; m pair buffer = buffer utils . reallocate buffer ( pair . class , m pair buffer , old capacity , new capacity ) ; m pair capacity = new capacity ; } pair pair = m pair buffer [ m pair count ] ; pair . index a = a ; pair . index b = b ; pair . flags = contact . flags ; pair . strength = group def . strength ; pair . distance = math utils . distance ( m position buffer . data [ a ] , m position buffer . data [ b ] ) ; m pair count ++ ; } } } if ( ( group def . flags & k triad flags ) != 0 ) { voronoi diagram diagram = new voronoi diagram ( last index - first index ) ; for ( int i = first index ; i < last index ; i ++ ) { diagram . 
add generator ( m position buffer . data [ i ] , i ) ; } diagram . generate ( stride / 2 ) ; create particle group callback . system = this ; create particle group callback . def = group def ; create particle group callback . first index = first index ; diagram . get nodes ( create particle group callback ) ; } if ( ( group def . group flags & particle group type . b 2 solid particle group ) != 0 ) { compute depth for group ( group ) ; } return group ; } <SENTENCE_END/>

(Copy Probability: 4.0%)

<SENTENCE_START> { float stride = get particle stride ( ) ; final transform identity = temp transform ; identity . set identity ( ) ; transform transform = temp transform 2 ; transform . set identity ( ) ; int first index = m count ; if ( group def . shape != null ) { final particle def particle def = temp particle def ; particle def . flags = group def . flags ; particle def . color = group def . color ; particle def . user data = group def . user data ; shape shape = group def . shape ; transform . set ( group def . position , group def . angle ) ; aabb aabb = temp ; int child count = shape . get child count ( ) ; for ( int child index = 0 ; child index < child count ; child index ++ ) { if ( child index == 0 ) { shape . compute aabb ( aabb , identity , child index ) ; } else { aabb child aabb = temp 2 ; shape . compute aabb ( child aabb , identity , child index ) ; aabb . combine ( child aabb ) ; } } final float upper bound y = aabb . upper bound . y ; final float upper bound x = aabb . upper bound . x ; for ( float y = math utils . floor ( aabb . lower bound . y / stride ) * stride ; y < upper bound y ; y += stride ) { for ( float x = math utils . floor ( aabb . lower bound . x / stride ) * stride ; x < upper bound x ; x += stride ) { vec 2 p = temp vec ; p . x = x ; p . y = y ; if ( shape . test point ( identity , p ) ) { transform . mul to out ( transform , p , p ) ; particle def . position . x = p . x ; particle def . position . y = p . y ; p . sub local ( group def . position ) ; vec 2 . cross to out unsafe ( group def . angular velocity , p , particle def . velocity ) ; particle def . velocity . add local ( group def . linear velocity ) ; create particle ( particle def ) ; } } } } int last index = m count ; particle group group = new particle group ( ) ; group . m system = this ; group . m first index = first index ; group . m last index = last index ; group . m group flags = group def . group flags ; group . m strength = group def . strength ; group . m user data = group def . user data ; group . m transform . set ( transform ) ; group . m destroy automatically = group def . destroy automatically ; group . m prev = null ; group . m next = m group list ; if ( m group list != null ) { m group list . m prev = group ; } m group list = group ; ++ m group count ; for ( int i = first index ; i < last index ; i ++ ) { m group buffer [ i ] = group ; } update contacts ( true ) ; if ( ( group def . flags & k pair flags ) != 0 ) { for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a > b ) { int temp = a ; a = b ; b = temp ; } if ( first index <= a && b < last index ) { if ( m pair count >= m pair capacity ) { int old capacity = m pair capacity ; int new capacity = m pair count != 0 ? 2 * m pair count : settings . min particle buffer capacity ; m pair buffer = buffer utils . reallocate buffer ( pair . class , m pair buffer , old capacity , new capacity ) ; m pair capacity = new capacity ; } pair pair = m pair buffer [ m pair count ] ; pair . index a = a ; pair . index b = b ; pair . flags = contact . flags ; pair . strength = group def . strength ; pair . distance = math utils . distance ( m position buffer . data [ a ] , m position buffer . data [ b ] ) ; m pair count ++ ; } } } if ( ( group def . flags & k triad flags ) != 0 ) { voronoi diagram diagram = new voronoi diagram ( last index - first index ) ; for ( int i = first index ; i < last index ; i ++ ) { diagram . 
add generator ( m position buffer . data [ i ] , i ) ; } diagram . generate ( stride / 2 ) ; create particle group callback . system = this ; create particle group callback . def = group def ; create particle group callback . first index = first index ; diagram . get nodes ( create particle group callback ) ; } if ( ( group def . group flags & particle group type . b 2 solid particle group ) != 0 ) { compute depth for group ( group ) ; } return group ; } <SENTENCE_END/>
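
Because the create particle group body above is long, the following is only a condensed, de-tokenized outline of its phases; identifiers are reconstructed assumptions and several loops are summarized in comments rather than transcribed.

    // Condensed sketch of the tokenized body above (identifiers are reconstructed assumptions).
    ParticleGroup createParticleGroup(ParticleGroupDef groupDef) {
      float stride = getParticleStride();
      int firstIndex = m_count;
      if (groupDef.shape != null) {
        // 1. Sample the shape's AABB on a grid with spacing `stride`; for every grid
        //    point inside the shape, create a particle with the group's flags, color
        //    and user data, and velocity v = linearVelocity + angularVelocity x r.
        //    (per-point createParticle(particleDef) calls elided)
      }
      int lastIndex = m_count;

      // 2. Link a new ParticleGroup covering [firstIndex, lastIndex) into the group
      //    list and point m_groupBuffer[i] at it for every new particle.
      ParticleGroup group = new ParticleGroup();
      group.m_firstIndex = firstIndex;
      group.m_lastIndex = lastIndex;
      // (remaining group fields copied from groupDef; list insertion and buffer fill elided)

      updateContacts(true);
      // 3. If the group uses pair flags, turn each contact between two new particles
      //    into a Pair, growing m_pairBuffer with the usual doubling policy.
      // 4. If the group uses triad flags, build a Voronoi diagram over the new
      //    particles and let createParticleGroupCallback emit triads from its nodes.
      // 5. Solid groups additionally get their depth field computed.
      if ((groupDef.groupFlags & ParticleGroupType.b2_solidParticleGroup) != 0) {
        computeDepthForGroup(group);
      }
      return group;
    }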


Original Name join,particle,groups

join

<SENTENCE_START> { assert ( group a != group b ) ; rotate buffer ( group b . m first index , group b . m last index , m count ) ; assert ( group b . m last index == m count ) ; rotate buffer ( group a . m first index , group a . m last index , group b . m first index ) ; assert ( group a . m last index == group b . m first index ) ; int particle flags = 0 ; for ( int i = group a . m first index ; i < group b . m last index ; i ++ ) { particle flags |= m flags buffer . data [ i ] ; } update contacts ( true ) ; if ( ( particle flags & k pair flags ) != 0 ) { for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a > b ) { int temp = a ; a = b ; b = temp ; } if ( group a . m first index <= a && a < group a . m last index && group b . m first index <= b && b < group b . m last index ) { if ( m pair count >= m pair capacity ) { int old capacity = m pair capacity ; int new capacity = m pair count != 0 ? 2 * m pair count : settings . min particle buffer capacity ; m pair buffer = buffer utils . reallocate buffer ( pair . class , m pair buffer , old capacity , new capacity ) ; m pair capacity = new capacity ; } pair pair = m pair buffer [ m pair count ] ; pair . index a = a ; pair . index b = b ; pair . flags = contact . flags ; pair . strength = math utils . min ( group a . m strength , group b . m strength ) ; pair . distance = math utils . distance ( m position buffer . data [ a ] , m position buffer . data [ b ] ) ; m pair count ++ ; } } } if ( ( particle flags & k triad flags ) != 0 ) { voronoi diagram diagram = new voronoi diagram ( group b . m last index - group a . m first index ) ; for ( int i = group a . m first index ; i < group b . m last index ; i ++ ) { if ( ( m flags buffer . data [ i ] & particle type . b 2 zombie particle ) == 0 ) { diagram . add generator ( m position buffer . data [ i ] , i ) ; } } diagram . generate ( get particle stride ( ) / 2 ) ; join particle groups callback callback = new join particle groups callback ( ) ; callback . system = this ; callback . group a = group a ; callback . group b = group b ; diagram . get nodes ( callback ) ; } for ( int i = group b . m first index ; i < group b . m last index ; i ++ ) { m group buffer [ i ] = group a ; } int group flags = group a . m group flags | group b . m group flags ; group a . m group flags = group flags ; group a . m last index = group b . m last index ; group b . m first index = group b . m last index ; destroy particle group ( group b ) ; if ( ( group flags & particle group type . b 2 solid particle group ) != 0 ) { compute depth for group ( group a ) ; } } <SENTENCE_END/>

(Copy Probability: 4.0%)

<SENTENCE_START> { assert ( group a != group b ) ; rotate buffer ( group b . m first index , group b . m last index , m count ) ; assert ( group b . m last index == m count ) ; rotate buffer ( group a . m first index , group a . m last index , group b . m first index ) ; assert ( group a . m last index == group b . m first index ) ; int particle flags = 0 ; for ( int i = group a . m first index ; i < group b . m last index ; i ++ ) { particle flags |= m flags buffer . data [ i ] ; } update contacts ( true ) ; if ( ( particle flags & k pair flags ) != 0 ) { for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a > b ) { int temp = a ; a = b ; b = temp ; } if ( group a . m first index <= a && a < group a . m last index && group b . m first index <= b && b < group b . m last index ) { if ( m pair count >= m pair capacity ) { int old capacity = m pair capacity ; int new capacity = m pair count != 0 ? 2 * m pair count : settings . min particle buffer capacity ; m pair buffer = buffer utils . reallocate buffer ( pair . class , m pair buffer , old capacity , new capacity ) ; m pair capacity = new capacity ; } pair pair = m pair buffer [ m pair count ] ; pair . index a = a ; pair . index b = b ; pair . flags = contact . flags ; pair . strength = math utils . min ( group a . m strength , group b . m strength ) ; pair . distance = math utils . distance ( m position buffer . data [ a ] , m position buffer . data [ b ] ) ; m pair count ++ ; } } } if ( ( particle flags & k triad flags ) != 0 ) { voronoi diagram diagram = new voronoi diagram ( group b . m last index - group a . m first index ) ; for ( int i = group a . m first index ; i < group b . m last index ; i ++ ) { if ( ( m flags buffer . data [ i ] & particle type . b 2 zombie particle ) == 0 ) { diagram . add generator ( m position buffer . data [ i ] , i ) ; } } diagram . generate ( get particle stride ( ) / 2 ) ; join particle groups callback callback = new join particle groups callback ( ) ; callback . system = this ; callback . group a = group a ; callback . group b = group b ; diagram . get nodes ( callback ) ; } for ( int i = group b . m first index ; i < group b . m last index ; i ++ ) { m group buffer [ i ] = group a ; } int group flags = group a . m group flags | group b . m group flags ; group a . m group flags = group flags ; group a . m last index = group b . m last index ; group b . m first index = group b . m last index ; destroy particle group ( group b ) ; if ( ( group flags & particle group type . b 2 solid particle group ) != 0 ) { compute depth for group ( group a ) ; } } <SENTENCE_END/>

particle

<SENTENCE_START> { assert ( group a != group b ) ; rotate buffer ( group b . m first index , group b . m last index , m count ) ; assert ( group b . m last index == m count ) ; rotate buffer ( group a . m first index , group a . m last index , group b . m first index ) ; assert ( group a . m last index == group b . m first index ) ; int particle flags = 0 ; for ( int i = group a . m first index ; i < group b . m last index ; i ++ ) { particle flags |= m flags buffer . data [ i ] ; } update contacts ( true ) ; if ( ( particle flags & k pair flags ) != 0 ) { for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a > b ) { int temp = a ; a = b ; b = temp ; } if ( group a . m first index <= a && a < group a . m last index && group b . m first index <= b && b < group b . m last index ) { if ( m pair count >= m pair capacity ) { int old capacity = m pair capacity ; int new capacity = m pair count != 0 ? 2 * m pair count : settings . min particle buffer capacity ; m pair buffer = buffer utils . reallocate buffer ( pair . class , m pair buffer , old capacity , new capacity ) ; m pair capacity = new capacity ; } pair pair = m pair buffer [ m pair count ] ; pair . index a = a ; pair . index b = b ; pair . flags = contact . flags ; pair . strength = math utils . min ( group a . m strength , group b . m strength ) ; pair . distance = math utils . distance ( m position buffer . data [ a ] , m position buffer . data [ b ] ) ; m pair count ++ ; } } } if ( ( particle flags & k triad flags ) != 0 ) { voronoi diagram diagram = new voronoi diagram ( group b . m last index - group a . m first index ) ; for ( int i = group a . m first index ; i < group b . m last index ; i ++ ) { if ( ( m flags buffer . data [ i ] & particle type . b 2 zombie particle ) == 0 ) { diagram . add generator ( m position buffer . data [ i ] , i ) ; } } diagram . generate ( get particle stride ( ) / 2 ) ; join particle groups callback callback = new join particle groups callback ( ) ; callback . system = this ; callback . group a = group a ; callback . group b = group b ; diagram . get nodes ( callback ) ; } for ( int i = group b . m first index ; i < group b . m last index ; i ++ ) { m group buffer [ i ] = group a ; } int group flags = group a . m group flags | group b . m group flags ; group a . m group flags = group flags ; group a . m last index = group b . m last index ; group b . m first index = group b . m last index ; destroy particle group ( group b ) ; if ( ( group flags & particle group type . b 2 solid particle group ) != 0 ) { compute depth for group ( group a ) ; } } <SENTENCE_END/>

(Copy Probability: 16.5%)

<SENTENCE_START> { assert ( group a != group b ) ; rotate buffer ( group b . m first index , group b . m last index , m count ) ; assert ( group b . m last index == m count ) ; rotate buffer ( group a . m first index , group a . m last index , group b . m first index ) ; assert ( group a . m last index == group b . m first index ) ; int particle flags = 0 ; for ( int i = group a . m first index ; i < group b . m last index ; i ++ ) { particle flags |= m flags buffer . data [ i ] ; } update contacts ( true ) ; if ( ( particle flags & k pair flags ) != 0 ) { for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a > b ) { int temp = a ; a = b ; b = temp ; } if ( group a . m first index <= a && a < group a . m last index && group b . m first index <= b && b < group b . m last index ) { if ( m pair count >= m pair capacity ) { int old capacity = m pair capacity ; int new capacity = m pair count != 0 ? 2 * m pair count : settings . min particle buffer capacity ; m pair buffer = buffer utils . reallocate buffer ( pair . class , m pair buffer , old capacity , new capacity ) ; m pair capacity = new capacity ; } pair pair = m pair buffer [ m pair count ] ; pair . index a = a ; pair . index b = b ; pair . flags = contact . flags ; pair . strength = math utils . min ( group a . m strength , group b . m strength ) ; pair . distance = math utils . distance ( m position buffer . data [ a ] , m position buffer . data [ b ] ) ; m pair count ++ ; } } } if ( ( particle flags & k triad flags ) != 0 ) { voronoi diagram diagram = new voronoi diagram ( group b . m last index - group a . m first index ) ; for ( int i = group a . m first index ; i < group b . m last index ; i ++ ) { if ( ( m flags buffer . data [ i ] & particle type . b 2 zombie particle ) == 0 ) { diagram . add generator ( m position buffer . data [ i ] , i ) ; } } diagram . generate ( get particle stride ( ) / 2 ) ; join particle groups callback callback = new join particle groups callback ( ) ; callback . system = this ; callback . group a = group a ; callback . group b = group b ; diagram . get nodes ( callback ) ; } for ( int i = group b . m first index ; i < group b . m last index ; i ++ ) { m group buffer [ i ] = group a ; } int group flags = group a . m group flags | group b . m group flags ; group a . m group flags = group flags ; group a . m last index = group b . m last index ; group b . m first index = group b . m last index ; destroy particle group ( group b ) ; if ( ( group flags & particle group type . b 2 solid particle group ) != 0 ) { compute depth for group ( group a ) ; } } <SENTENCE_END/>

groups

<SENTENCE_START> { assert ( group a != group b ) ; rotate buffer ( group b . m first index , group b . m last index , m count ) ; assert ( group b . m last index == m count ) ; rotate buffer ( group a . m first index , group a . m last index , group b . m first index ) ; assert ( group a . m last index == group b . m first index ) ; int particle flags = 0 ; for ( int i = group a . m first index ; i < group b . m last index ; i ++ ) { particle flags |= m flags buffer . data [ i ] ; } update contacts ( true ) ; if ( ( particle flags & k pair flags ) != 0 ) { for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a > b ) { int temp = a ; a = b ; b = temp ; } if ( group a . m first index <= a && a < group a . m last index && group b . m first index <= b && b < group b . m last index ) { if ( m pair count >= m pair capacity ) { int old capacity = m pair capacity ; int new capacity = m pair count != 0 ? 2 * m pair count : settings . min particle buffer capacity ; m pair buffer = buffer utils . reallocate buffer ( pair . class , m pair buffer , old capacity , new capacity ) ; m pair capacity = new capacity ; } pair pair = m pair buffer [ m pair count ] ; pair . index a = a ; pair . index b = b ; pair . flags = contact . flags ; pair . strength = math utils . min ( group a . m strength , group b . m strength ) ; pair . distance = math utils . distance ( m position buffer . data [ a ] , m position buffer . data [ b ] ) ; m pair count ++ ; } } } if ( ( particle flags & k triad flags ) != 0 ) { voronoi diagram diagram = new voronoi diagram ( group b . m last index - group a . m first index ) ; for ( int i = group a . m first index ; i < group b . m last index ; i ++ ) { if ( ( m flags buffer . data [ i ] & particle type . b 2 zombie particle ) == 0 ) { diagram . add generator ( m position buffer . data [ i ] , i ) ; } } diagram . generate ( get particle stride ( ) / 2 ) ; join particle groups callback callback = new join particle groups callback ( ) ; callback . system = this ; callback . group a = group a ; callback . group b = group b ; diagram . get nodes ( callback ) ; } for ( int i = group b . m first index ; i < group b . m last index ; i ++ ) { m group buffer [ i ] = group a ; } int group flags = group a . m group flags | group b . m group flags ; group a . m group flags = group flags ; group a . m last index = group b . m last index ; group b . m first index = group b . m last index ; destroy particle group ( group b ) ; if ( ( group flags & particle group type . b 2 solid particle group ) != 0 ) { compute depth for group ( group a ) ; } } <SENTENCE_END/>

(Copy Probability: 12.0%)

<SENTENCE_START> { assert ( group a != group b ) ; rotate buffer ( group b . m first index , group b . m last index , m count ) ; assert ( group b . m last index == m count ) ; rotate buffer ( group a . m first index , group a . m last index , group b . m first index ) ; assert ( group a . m last index == group b . m first index ) ; int particle flags = 0 ; for ( int i = group a . m first index ; i < group b . m last index ; i ++ ) { particle flags |= m flags buffer . data [ i ] ; } update contacts ( true ) ; if ( ( particle flags & k pair flags ) != 0 ) { for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a > b ) { int temp = a ; a = b ; b = temp ; } if ( group a . m first index <= a && a < group a . m last index && group b . m first index <= b && b < group b . m last index ) { if ( m pair count >= m pair capacity ) { int old capacity = m pair capacity ; int new capacity = m pair count != 0 ? 2 * m pair count : settings . min particle buffer capacity ; m pair buffer = buffer utils . reallocate buffer ( pair . class , m pair buffer , old capacity , new capacity ) ; m pair capacity = new capacity ; } pair pair = m pair buffer [ m pair count ] ; pair . index a = a ; pair . index b = b ; pair . flags = contact . flags ; pair . strength = math utils . min ( group a . m strength , group b . m strength ) ; pair . distance = math utils . distance ( m position buffer . data [ a ] , m position buffer . data [ b ] ) ; m pair count ++ ; } } } if ( ( particle flags & k triad flags ) != 0 ) { voronoi diagram diagram = new voronoi diagram ( group b . m last index - group a . m first index ) ; for ( int i = group a . m first index ; i < group b . m last index ; i ++ ) { if ( ( m flags buffer . data [ i ] & particle type . b 2 zombie particle ) == 0 ) { diagram . add generator ( m position buffer . data [ i ] , i ) ; } } diagram . generate ( get particle stride ( ) / 2 ) ; join particle groups callback callback = new join particle groups callback ( ) ; callback . system = this ; callback . group a = group a ; callback . group b = group b ; diagram . get nodes ( callback ) ; } for ( int i = group b . m first index ; i < group b . m last index ; i ++ ) { m group buffer [ i ] = group a ; } int group flags = group a . m group flags | group b . m group flags ; group a . m group flags = group flags ; group a . m last index = group b . m last index ; group b . m first index = group b . m last index ; destroy particle group ( group b ) ; if ( ( group flags & particle group type . b 2 solid particle group ) != 0 ) { compute depth for group ( group a ) ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { assert ( group a != group b ) ; rotate buffer ( group b . m first index , group b . m last index , m count ) ; assert ( group b . m last index == m count ) ; rotate buffer ( group a . m first index , group a . m last index , group b . m first index ) ; assert ( group a . m last index == group b . m first index ) ; int particle flags = 0 ; for ( int i = group a . m first index ; i < group b . m last index ; i ++ ) { particle flags |= m flags buffer . data [ i ] ; } update contacts ( true ) ; if ( ( particle flags & k pair flags ) != 0 ) { for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a > b ) { int temp = a ; a = b ; b = temp ; } if ( group a . m first index <= a && a < group a . m last index && group b . m first index <= b && b < group b . m last index ) { if ( m pair count >= m pair capacity ) { int old capacity = m pair capacity ; int new capacity = m pair count != 0 ? 2 * m pair count : settings . min particle buffer capacity ; m pair buffer = buffer utils . reallocate buffer ( pair . class , m pair buffer , old capacity , new capacity ) ; m pair capacity = new capacity ; } pair pair = m pair buffer [ m pair count ] ; pair . index a = a ; pair . index b = b ; pair . flags = contact . flags ; pair . strength = math utils . min ( group a . m strength , group b . m strength ) ; pair . distance = math utils . distance ( m position buffer . data [ a ] , m position buffer . data [ b ] ) ; m pair count ++ ; } } } if ( ( particle flags & k triad flags ) != 0 ) { voronoi diagram diagram = new voronoi diagram ( group b . m last index - group a . m first index ) ; for ( int i = group a . m first index ; i < group b . m last index ; i ++ ) { if ( ( m flags buffer . data [ i ] & particle type . b 2 zombie particle ) == 0 ) { diagram . add generator ( m position buffer . data [ i ] , i ) ; } } diagram . generate ( get particle stride ( ) / 2 ) ; join particle groups callback callback = new join particle groups callback ( ) ; callback . system = this ; callback . group a = group a ; callback . group b = group b ; diagram . get nodes ( callback ) ; } for ( int i = group b . m first index ; i < group b . m last index ; i ++ ) { m group buffer [ i ] = group a ; } int group flags = group a . m group flags | group b . m group flags ; group a . m group flags = group flags ; group a . m last index = group b . m last index ; group b . m first index = group b . m last index ; destroy particle group ( group b ) ; if ( ( group flags & particle group type . b 2 solid particle group ) != 0 ) { compute depth for group ( group a ) ; } } <SENTENCE_END/>

(Copy Probability: 4.6%)

<SENTENCE_START> { assert ( group a != group b ) ; rotate buffer ( group b . m first index , group b . m last index , m count ) ; assert ( group b . m last index == m count ) ; rotate buffer ( group a . m first index , group a . m last index , group b . m first index ) ; assert ( group a . m last index == group b . m first index ) ; int particle flags = 0 ; for ( int i = group a . m first index ; i < group b . m last index ; i ++ ) { particle flags |= m flags buffer . data [ i ] ; } update contacts ( true ) ; if ( ( particle flags & k pair flags ) != 0 ) { for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a > b ) { int temp = a ; a = b ; b = temp ; } if ( group a . m first index <= a && a < group a . m last index && group b . m first index <= b && b < group b . m last index ) { if ( m pair count >= m pair capacity ) { int old capacity = m pair capacity ; int new capacity = m pair count != 0 ? 2 * m pair count : settings . min particle buffer capacity ; m pair buffer = buffer utils . reallocate buffer ( pair . class , m pair buffer , old capacity , new capacity ) ; m pair capacity = new capacity ; } pair pair = m pair buffer [ m pair count ] ; pair . index a = a ; pair . index b = b ; pair . flags = contact . flags ; pair . strength = math utils . min ( group a . m strength , group b . m strength ) ; pair . distance = math utils . distance ( m position buffer . data [ a ] , m position buffer . data [ b ] ) ; m pair count ++ ; } } } if ( ( particle flags & k triad flags ) != 0 ) { voronoi diagram diagram = new voronoi diagram ( group b . m last index - group a . m first index ) ; for ( int i = group a . m first index ; i < group b . m last index ; i ++ ) { if ( ( m flags buffer . data [ i ] & particle type . b 2 zombie particle ) == 0 ) { diagram . add generator ( m position buffer . data [ i ] , i ) ; } } diagram . generate ( get particle stride ( ) / 2 ) ; join particle groups callback callback = new join particle groups callback ( ) ; callback . system = this ; callback . group a = group a ; callback . group b = group b ; diagram . get nodes ( callback ) ; } for ( int i = group b . m first index ; i < group b . m last index ; i ++ ) { m group buffer [ i ] = group a ; } int group flags = group a . m group flags | group b . m group flags ; group a . m group flags = group flags ; group a . m last index = group b . m last index ; group b . m first index = group b . m last index ; destroy particle group ( group b ) ; if ( ( group flags & particle group type . b 2 solid particle group ) != 0 ) { compute depth for group ( group a ) ; } } <SENTENCE_END/>
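
For readers skimming the tokenized body above: the join starts by rotating the particle buffers so the two groups become one contiguous index range. Below is a minimal, self-contained Java sketch of that rotation step only (the classic three-reversal rotate); the class and method names are illustrative and are not the jbox2d API.

// Illustrative only: left-rotates arr[first, last) so that arr[mid, last)
// ends up in front of arr[first, mid), mirroring the rotate-buffer step
// that makes two particle-index ranges contiguous before a join.
final class RotateSketch {
    // Reverses arr[from, to) in place.
    static void reverse(int[] arr, int from, int to) {
        for (int i = from, j = to - 1; i < j; i++, j--) {
            int tmp = arr[i]; arr[i] = arr[j]; arr[j] = tmp;
        }
    }

    static void rotateRange(int[] arr, int first, int mid, int last) {
        reverse(arr, first, mid);
        reverse(arr, mid, last);
        reverse(arr, first, last);
    }
}

The same rotation is applied once to push group b to the end of the buffers and once more to place group a directly in front of it, which is why the assertions on m last index hold afterwards.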


Original Name destroy,particle,group

destroy

<SENTENCE_START> { assert ( m group count > 0 ) ; assert ( group != null ) ; if ( m world . get particle destruction listener ( ) != null ) { m world . get particle destruction listener ( ) . say goodbye ( group ) ; } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m group buffer [ i ] = null ; } if ( group . m prev != null ) { group . m prev . m next = group . m next ; } if ( group . m next != null ) { group . m next . m prev = group . m prev ; } if ( group == m group list ) { m group list = group . m next ; } -- m group count ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { assert ( m group count > 0 ) ; assert ( group != null ) ; if ( m world . get particle destruction listener ( ) != null ) { m world . get particle destruction listener ( ) . say goodbye ( group ) ; } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m group buffer [ i ] = null ; } if ( group . m prev != null ) { group . m prev . m next = group . m next ; } if ( group . m next != null ) { group . m next . m prev = group . m prev ; } if ( group == m group list ) { m group list = group . m next ; } -- m group count ; } <SENTENCE_END/>

particle

<SENTENCE_START> { assert ( m group count > 0 ) ; assert ( group != null ) ; if ( m world . get particle destruction listener ( ) != null ) { m world . get particle destruction listener ( ) . say goodbye ( group ) ; } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m group buffer [ i ] = null ; } if ( group . m prev != null ) { group . m prev . m next = group . m next ; } if ( group . m next != null ) { group . m next . m prev = group . m prev ; } if ( group == m group list ) { m group list = group . m next ; } -- m group count ; } <SENTENCE_END/>

(Copy Probability: 21.9%)

<SENTENCE_START> { assert ( m group count > 0 ) ; assert ( group != null ) ; if ( m world . get particle destruction listener ( ) != null ) { m world . get particle destruction listener ( ) . say goodbye ( group ) ; } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m group buffer [ i ] = null ; } if ( group . m prev != null ) { group . m prev . m next = group . m next ; } if ( group . m next != null ) { group . m next . m prev = group . m prev ; } if ( group == m group list ) { m group list = group . m next ; } -- m group count ; } <SENTENCE_END/>

group

<SENTENCE_START> { assert ( m group count > 0 ) ; assert ( group != null ) ; if ( m world . get particle destruction listener ( ) != null ) { m world . get particle destruction listener ( ) . say goodbye ( group ) ; } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m group buffer [ i ] = null ; } if ( group . m prev != null ) { group . m prev . m next = group . m next ; } if ( group . m next != null ) { group . m next . m prev = group . m prev ; } if ( group == m group list ) { m group list = group . m next ; } -- m group count ; } <SENTENCE_END/>

(Copy Probability: 25.5%)

<SENTENCE_START> { assert ( m group count > 0 ) ; assert ( group != null ) ; if ( m world . get particle destruction listener ( ) != null ) { m world . get particle destruction listener ( ) . say goodbye ( group ) ; } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m group buffer [ i ] = null ; } if ( group . m prev != null ) { group . m prev . m next = group . m next ; } if ( group . m next != null ) { group . m next . m prev = group . m prev ; } if ( group == m group list ) { m group list = group . m next ; } -- m group count ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { assert ( m group count > 0 ) ; assert ( group != null ) ; if ( m world . get particle destruction listener ( ) != null ) { m world . get particle destruction listener ( ) . say goodbye ( group ) ; } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m group buffer [ i ] = null ; } if ( group . m prev != null ) { group . m prev . m next = group . m next ; } if ( group . m next != null ) { group . m next . m prev = group . m prev ; } if ( group == m group list ) { m group list = group . m next ; } -- m group count ; } <SENTENCE_END/>

(Copy Probability: 8.8%)

<SENTENCE_START> { assert ( m group count > 0 ) ; assert ( group != null ) ; if ( m world . get particle destruction listener ( ) != null ) { m world . get particle destruction listener ( ) . say goodbye ( group ) ; } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m group buffer [ i ] = null ; } if ( group . m prev != null ) { group . m prev . m next = group . m next ; } if ( group . m next != null ) { group . m next . m prev = group . m prev ; } if ( group == m group list ) { m group list = group . m next ; } -- m group count ; } <SENTENCE_END/>
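
The destroy-particle-group body above is essentially an unlink from an intrusive doubly-linked list of groups plus an optional listener callback. The following is a minimal sketch of that pattern, assuming hypothetical Group and DestructionListener types; none of these names are the jbox2d API.

// Illustrative only: unlinking a group node from an intrusive doubly-linked
// list with a head pointer, after notifying an optional destruction listener.
final class GroupListSketch {
    interface DestructionListener { void sayGoodbye(Group g); }

    static final class Group { Group prev, next; }

    Group head;                          // first group in the list
    int groupCount;
    DestructionListener listener;

    void destroy(Group g) {
        assert groupCount > 0 && g != null;
        if (listener != null) {
            listener.sayGoodbye(g);      // let clients release per-group data first
        }
        if (g.prev != null) { g.prev.next = g.next; }
        if (g.next != null) { g.next.prev = g.prev; }
        if (g == head)      { head = g.next; }
        --groupCount;
    }
}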


Original Name compute,depth,for,group

compute

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m accumulation buffer [ i ] = 0 ; } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float w = contact . weight ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; } } m depth buffer = request particle buffer ( m depth buffer ) ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float w = m accumulation buffer [ i ] ; m depth buffer [ i ] = w < 0.8f ? 0 : float . max value ; } int interation count = group . get particle count ( ) ; for ( int t = 0 ; t < interation count ; t ++ ) { boolean updated = false ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float r = 1 - contact . weight ; float ap 0 = m depth buffer [ a ] ; float bp 0 = m depth buffer [ b ] ; float ap 1 = bp 0 + r ; float bp 1 = ap 0 + r ; if ( ap 0 > ap 1 ) { m depth buffer [ a ] = ap 1 ; updated = true ; } if ( bp 0 > bp 1 ) { m depth buffer [ b ] = bp 1 ; updated = true ; } } } if ( ! updated ) { break ; } } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float p = m depth buffer [ i ] ; if ( p < float . max value ) { m depth buffer [ i ] *= m particle diameter ; } else { m depth buffer [ i ] = 0 ; } } } <SENTENCE_END/>

(Copy Probability: 3.8%)

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m accumulation buffer [ i ] = 0 ; } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float w = contact . weight ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; } } m depth buffer = request particle buffer ( m depth buffer ) ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float w = m accumulation buffer [ i ] ; m depth buffer [ i ] = w < 0.8f ? 0 : float . max value ; } int interation count = group . get particle count ( ) ; for ( int t = 0 ; t < interation count ; t ++ ) { boolean updated = false ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float r = 1 - contact . weight ; float ap 0 = m depth buffer [ a ] ; float bp 0 = m depth buffer [ b ] ; float ap 1 = bp 0 + r ; float bp 1 = ap 0 + r ; if ( ap 0 > ap 1 ) { m depth buffer [ a ] = ap 1 ; updated = true ; } if ( bp 0 > bp 1 ) { m depth buffer [ b ] = bp 1 ; updated = true ; } } } if ( ! updated ) { break ; } } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float p = m depth buffer [ i ] ; if ( p < float . max value ) { m depth buffer [ i ] *= m particle diameter ; } else { m depth buffer [ i ] = 0 ; } } } <SENTENCE_END/>

depth

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m accumulation buffer [ i ] = 0 ; } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float w = contact . weight ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; } } m depth buffer = request particle buffer ( m depth buffer ) ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float w = m accumulation buffer [ i ] ; m depth buffer [ i ] = w < 0.8f ? 0 : float . max value ; } int interation count = group . get particle count ( ) ; for ( int t = 0 ; t < interation count ; t ++ ) { boolean updated = false ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float r = 1 - contact . weight ; float ap 0 = m depth buffer [ a ] ; float bp 0 = m depth buffer [ b ] ; float ap 1 = bp 0 + r ; float bp 1 = ap 0 + r ; if ( ap 0 > ap 1 ) { m depth buffer [ a ] = ap 1 ; updated = true ; } if ( bp 0 > bp 1 ) { m depth buffer [ b ] = bp 1 ; updated = true ; } } } if ( ! updated ) { break ; } } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float p = m depth buffer [ i ] ; if ( p < float . max value ) { m depth buffer [ i ] *= m particle diameter ; } else { m depth buffer [ i ] = 0 ; } } } <SENTENCE_END/>

(Copy Probability: 22.8%)

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m accumulation buffer [ i ] = 0 ; } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float w = contact . weight ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; } } m depth buffer = request particle buffer ( m depth buffer ) ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float w = m accumulation buffer [ i ] ; m depth buffer [ i ] = w < 0.8f ? 0 : float . max value ; } int interation count = group . get particle count ( ) ; for ( int t = 0 ; t < interation count ; t ++ ) { boolean updated = false ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float r = 1 - contact . weight ; float ap 0 = m depth buffer [ a ] ; float bp 0 = m depth buffer [ b ] ; float ap 1 = bp 0 + r ; float bp 1 = ap 0 + r ; if ( ap 0 > ap 1 ) { m depth buffer [ a ] = ap 1 ; updated = true ; } if ( bp 0 > bp 1 ) { m depth buffer [ b ] = bp 1 ; updated = true ; } } } if ( ! updated ) { break ; } } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float p = m depth buffer [ i ] ; if ( p < float . max value ) { m depth buffer [ i ] *= m particle diameter ; } else { m depth buffer [ i ] = 0 ; } } } <SENTENCE_END/>

for

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m accumulation buffer [ i ] = 0 ; } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float w = contact . weight ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; } } m depth buffer = request particle buffer ( m depth buffer ) ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float w = m accumulation buffer [ i ] ; m depth buffer [ i ] = w < 0.8f ? 0 : float . max value ; } int interation count = group . get particle count ( ) ; for ( int t = 0 ; t < interation count ; t ++ ) { boolean updated = false ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float r = 1 - contact . weight ; float ap 0 = m depth buffer [ a ] ; float bp 0 = m depth buffer [ b ] ; float ap 1 = bp 0 + r ; float bp 1 = ap 0 + r ; if ( ap 0 > ap 1 ) { m depth buffer [ a ] = ap 1 ; updated = true ; } if ( bp 0 > bp 1 ) { m depth buffer [ b ] = bp 1 ; updated = true ; } } } if ( ! updated ) { break ; } } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float p = m depth buffer [ i ] ; if ( p < float . max value ) { m depth buffer [ i ] *= m particle diameter ; } else { m depth buffer [ i ] = 0 ; } } } <SENTENCE_END/>

(Copy Probability: 13.4%)

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m accumulation buffer [ i ] = 0 ; } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float w = contact . weight ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; } } m depth buffer = request particle buffer ( m depth buffer ) ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float w = m accumulation buffer [ i ] ; m depth buffer [ i ] = w < 0.8f ? 0 : float . max value ; } int interation count = group . get particle count ( ) ; for ( int t = 0 ; t < interation count ; t ++ ) { boolean updated = false ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float r = 1 - contact . weight ; float ap 0 = m depth buffer [ a ] ; float bp 0 = m depth buffer [ b ] ; float ap 1 = bp 0 + r ; float bp 1 = ap 0 + r ; if ( ap 0 > ap 1 ) { m depth buffer [ a ] = ap 1 ; updated = true ; } if ( bp 0 > bp 1 ) { m depth buffer [ b ] = bp 1 ; updated = true ; } } } if ( ! updated ) { break ; } } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float p = m depth buffer [ i ] ; if ( p < float . max value ) { m depth buffer [ i ] *= m particle diameter ; } else { m depth buffer [ i ] = 0 ; } } } <SENTENCE_END/>

group

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m accumulation buffer [ i ] = 0 ; } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float w = contact . weight ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; } } m depth buffer = request particle buffer ( m depth buffer ) ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float w = m accumulation buffer [ i ] ; m depth buffer [ i ] = w < 0.8f ? 0 : float . max value ; } int interation count = group . get particle count ( ) ; for ( int t = 0 ; t < interation count ; t ++ ) { boolean updated = false ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float r = 1 - contact . weight ; float ap 0 = m depth buffer [ a ] ; float bp 0 = m depth buffer [ b ] ; float ap 1 = bp 0 + r ; float bp 1 = ap 0 + r ; if ( ap 0 > ap 1 ) { m depth buffer [ a ] = ap 1 ; updated = true ; } if ( bp 0 > bp 1 ) { m depth buffer [ b ] = bp 1 ; updated = true ; } } } if ( ! updated ) { break ; } } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float p = m depth buffer [ i ] ; if ( p < float . max value ) { m depth buffer [ i ] *= m particle diameter ; } else { m depth buffer [ i ] = 0 ; } } } <SENTENCE_END/>

(Copy Probability: 5.0%)

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m accumulation buffer [ i ] = 0 ; } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float w = contact . weight ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; } } m depth buffer = request particle buffer ( m depth buffer ) ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float w = m accumulation buffer [ i ] ; m depth buffer [ i ] = w < 0.8f ? 0 : float . max value ; } int interation count = group . get particle count ( ) ; for ( int t = 0 ; t < interation count ; t ++ ) { boolean updated = false ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float r = 1 - contact . weight ; float ap 0 = m depth buffer [ a ] ; float bp 0 = m depth buffer [ b ] ; float ap 1 = bp 0 + r ; float bp 1 = ap 0 + r ; if ( ap 0 > ap 1 ) { m depth buffer [ a ] = ap 1 ; updated = true ; } if ( bp 0 > bp 1 ) { m depth buffer [ b ] = bp 1 ; updated = true ; } } } if ( ! updated ) { break ; } } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float p = m depth buffer [ i ] ; if ( p < float . max value ) { m depth buffer [ i ] *= m particle diameter ; } else { m depth buffer [ i ] = 0 ; } } } <SENTENCE_END/>

%END%

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m accumulation buffer [ i ] = 0 ; } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float w = contact . weight ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; } } m depth buffer = request particle buffer ( m depth buffer ) ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float w = m accumulation buffer [ i ] ; m depth buffer [ i ] = w < 0.8f ? 0 : float . max value ; } int interation count = group . get particle count ( ) ; for ( int t = 0 ; t < interation count ; t ++ ) { boolean updated = false ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float r = 1 - contact . weight ; float ap 0 = m depth buffer [ a ] ; float bp 0 = m depth buffer [ b ] ; float ap 1 = bp 0 + r ; float bp 1 = ap 0 + r ; if ( ap 0 > ap 1 ) { m depth buffer [ a ] = ap 1 ; updated = true ; } if ( bp 0 > bp 1 ) { m depth buffer [ b ] = bp 1 ; updated = true ; } } } if ( ! updated ) { break ; } } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float p = m depth buffer [ i ] ; if ( p < float . max value ) { m depth buffer [ i ] *= m particle diameter ; } else { m depth buffer [ i ] = 0 ; } } } <SENTENCE_END/>

(Copy Probability: 4.6%)

<SENTENCE_START> { for ( int i = group . m first index ; i < group . m last index ; i ++ ) { m accumulation buffer [ i ] = 0 ; } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float w = contact . weight ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; } } m depth buffer = request particle buffer ( m depth buffer ) ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float w = m accumulation buffer [ i ] ; m depth buffer [ i ] = w < 0.8f ? 0 : float . max value ; } int interation count = group . get particle count ( ) ; for ( int t = 0 ; t < interation count ; t ++ ) { boolean updated = false ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( a >= group . m first index && a < group . m last index && b >= group . m first index && b < group . m last index ) { float r = 1 - contact . weight ; float ap 0 = m depth buffer [ a ] ; float bp 0 = m depth buffer [ b ] ; float ap 1 = bp 0 + r ; float bp 1 = ap 0 + r ; if ( ap 0 > ap 1 ) { m depth buffer [ a ] = ap 1 ; updated = true ; } if ( bp 0 > bp 1 ) { m depth buffer [ b ] = bp 1 ; updated = true ; } } } if ( ! updated ) { break ; } } for ( int i = group . m first index ; i < group . m last index ; i ++ ) { float p = m depth buffer [ i ] ; if ( p < float . max value ) { m depth buffer [ i ] *= m particle diameter ; } else { m depth buffer [ i ] = 0 ; } } } <SENTENCE_END/>
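
The compute-depth pass above is a Bellman-Ford style relaxation over the particle contact graph: low-weight (surface) particles seed depth 0, interior particles start at +infinity, and every sweep tightens depths along contacts until nothing changes. Here is a minimal, self-contained sketch of that relaxation, with an assumed Edge record standing in for particle contacts.

// Illustrative only: iterative relaxation of per-node depths over an edge list.
// depth[i] must be pre-seeded with 0 for boundary nodes and Float.MAX_VALUE otherwise.
final class DepthRelaxationSketch {
    static final class Edge {
        final int a, b; final float length;
        Edge(int a, int b, float length) { this.a = a; this.b = b; this.length = length; }
    }

    static void relax(float[] depth, Edge[] edges, int maxIterations) {
        for (int t = 0; t < maxIterations; t++) {
            boolean updated = false;
            for (Edge e : edges) {
                float viaA = depth[e.a] + e.length;    // reach b through a
                float viaB = depth[e.b] + e.length;    // reach a through b
                if (viaB < depth[e.a]) { depth[e.a] = viaB; updated = true; }
                if (viaA < depth[e.b]) { depth[e.b] = viaA; updated = true; }
            }
            if (!updated) { break; }                   // converged early, as in the body above
        }
    }
}

In the tokenized body the edge length is 1 - contact.weight and the loop is capped at the particle count of the group, after which finite depths are scaled by the particle diameter.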


Original Name add,contact

add

<SENTENCE_START> { assert ( a != b ) ; vec 2 pa = m position buffer . data [ a ] ; vec 2 pb = m position buffer . data [ b ] ; float dx = pb . x - pa . x ; float dy = pb . y - pa . y ; float d 2 = dx * dx + dy * dy ; if ( d 2 < m squared diameter ) { if ( m contact count >= m contact capacity ) { int old capacity = m contact capacity ; int new capacity = m contact count != 0 ? 2 * m contact count : settings . min particle buffer capacity ; m contact buffer = buffer utils . reallocate buffer ( particle contact . class , m contact buffer , old capacity , new capacity ) ; m contact capacity = new capacity ; } float inv d = d 2 != 0 ? math utils . sqrt ( 1 / d 2 ) : float . max value ; particle contact contact = m contact buffer [ m contact count ] ; contact . index a = a ; contact . index b = b ; contact . flags = m flags buffer . data [ a ] | m flags buffer . data [ b ] ; contact . weight = 1 - d 2 * inv d * m inverse diameter ; contact . normal . x = inv d * dx ; contact . normal . y = inv d * dy ; m contact count ++ ; } } <SENTENCE_END/>

(Copy Probability: 4.5%)

<SENTENCE_START> { assert ( a != b ) ; vec 2 pa = m position buffer . data [ a ] ; vec 2 pb = m position buffer . data [ b ] ; float dx = pb . x - pa . x ; float dy = pb . y - pa . y ; float d 2 = dx * dx + dy * dy ; if ( d 2 < m squared diameter ) { if ( m contact count >= m contact capacity ) { int old capacity = m contact capacity ; int new capacity = m contact count != 0 ? 2 * m contact count : settings . min particle buffer capacity ; m contact buffer = buffer utils . reallocate buffer ( particle contact . class , m contact buffer , old capacity , new capacity ) ; m contact capacity = new capacity ; } float inv d = d 2 != 0 ? math utils . sqrt ( 1 / d 2 ) : float . max value ; particle contact contact = m contact buffer [ m contact count ] ; contact . index a = a ; contact . index b = b ; contact . flags = m flags buffer . data [ a ] | m flags buffer . data [ b ] ; contact . weight = 1 - d 2 * inv d * m inverse diameter ; contact . normal . x = inv d * dx ; contact . normal . y = inv d * dy ; m contact count ++ ; } } <SENTENCE_END/>

contact

<SENTENCE_START> { assert ( a != b ) ; vec 2 pa = m position buffer . data [ a ] ; vec 2 pb = m position buffer . data [ b ] ; float dx = pb . x - pa . x ; float dy = pb . y - pa . y ; float d 2 = dx * dx + dy * dy ; if ( d 2 < m squared diameter ) { if ( m contact count >= m contact capacity ) { int old capacity = m contact capacity ; int new capacity = m contact count != 0 ? 2 * m contact count : settings . min particle buffer capacity ; m contact buffer = buffer utils . reallocate buffer ( particle contact . class , m contact buffer , old capacity , new capacity ) ; m contact capacity = new capacity ; } float inv d = d 2 != 0 ? math utils . sqrt ( 1 / d 2 ) : float . max value ; particle contact contact = m contact buffer [ m contact count ] ; contact . index a = a ; contact . index b = b ; contact . flags = m flags buffer . data [ a ] | m flags buffer . data [ b ] ; contact . weight = 1 - d 2 * inv d * m inverse diameter ; contact . normal . x = inv d * dx ; contact . normal . y = inv d * dy ; m contact count ++ ; } } <SENTENCE_END/>

(Copy Probability: 32.7%)

<SENTENCE_START> { assert ( a != b ) ; vec 2 pa = m position buffer . data [ a ] ; vec 2 pb = m position buffer . data [ b ] ; float dx = pb . x - pa . x ; float dy = pb . y - pa . y ; float d 2 = dx * dx + dy * dy ; if ( d 2 < m squared diameter ) { if ( m contact count >= m contact capacity ) { int old capacity = m contact capacity ; int new capacity = m contact count != 0 ? 2 * m contact count : settings . min particle buffer capacity ; m contact buffer = buffer utils . reallocate buffer ( particle contact . class , m contact buffer , old capacity , new capacity ) ; m contact capacity = new capacity ; } float inv d = d 2 != 0 ? math utils . sqrt ( 1 / d 2 ) : float . max value ; particle contact contact = m contact buffer [ m contact count ] ; contact . index a = a ; contact . index b = b ; contact . flags = m flags buffer . data [ a ] | m flags buffer . data [ b ] ; contact . weight = 1 - d 2 * inv d * m inverse diameter ; contact . normal . x = inv d * dx ; contact . normal . y = inv d * dy ; m contact count ++ ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { assert ( a != b ) ; vec 2 pa = m position buffer . data [ a ] ; vec 2 pb = m position buffer . data [ b ] ; float dx = pb . x - pa . x ; float dy = pb . y - pa . y ; float d 2 = dx * dx + dy * dy ; if ( d 2 < m squared diameter ) { if ( m contact count >= m contact capacity ) { int old capacity = m contact capacity ; int new capacity = m contact count != 0 ? 2 * m contact count : settings . min particle buffer capacity ; m contact buffer = buffer utils . reallocate buffer ( particle contact . class , m contact buffer , old capacity , new capacity ) ; m contact capacity = new capacity ; } float inv d = d 2 != 0 ? math utils . sqrt ( 1 / d 2 ) : float . max value ; particle contact contact = m contact buffer [ m contact count ] ; contact . index a = a ; contact . index b = b ; contact . flags = m flags buffer . data [ a ] | m flags buffer . data [ b ] ; contact . weight = 1 - d 2 * inv d * m inverse diameter ; contact . normal . x = inv d * dx ; contact . normal . y = inv d * dy ; m contact count ++ ; } } <SENTENCE_END/>

(Copy Probability: 15.1%)

<SENTENCE_START> { assert ( a != b ) ; vec 2 pa = m position buffer . data [ a ] ; vec 2 pb = m position buffer . data [ b ] ; float dx = pb . x - pa . x ; float dy = pb . y - pa . y ; float d 2 = dx * dx + dy * dy ; if ( d 2 < m squared diameter ) { if ( m contact count >= m contact capacity ) { int old capacity = m contact capacity ; int new capacity = m contact count != 0 ? 2 * m contact count : settings . min particle buffer capacity ; m contact buffer = buffer utils . reallocate buffer ( particle contact . class , m contact buffer , old capacity , new capacity ) ; m contact capacity = new capacity ; } float inv d = d 2 != 0 ? math utils . sqrt ( 1 / d 2 ) : float . max value ; particle contact contact = m contact buffer [ m contact count ] ; contact . index a = a ; contact . index b = b ; contact . flags = m flags buffer . data [ a ] | m flags buffer . data [ b ] ; contact . weight = 1 - d 2 * inv d * m inverse diameter ; contact . normal . x = inv d * dx ; contact . normal . y = inv d * dy ; m contact count ++ ; } } <SENTENCE_END/>
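
The add-contact body above stays in squared distance until a hit is confirmed, then derives the unit normal and a [0, 1] weight from one inverse square root. A minimal sketch of that computation follows; the Contact class and its field names are assumptions for illustration, not jbox2d types.

// Illustrative only: weight and normal for a particle pair closer than one diameter.
final class ContactSketch {
    static final class Contact { int a, b; float weight, nx, ny; }

    static Contact makeContact(int a, int b, float ax, float ay, float bx, float by, float diameter) {
        float dx = bx - ax, dy = by - ay;
        float d2 = dx * dx + dy * dy;
        if (d2 >= diameter * diameter) {
            return null;                               // too far apart: no contact
        }
        float invD = d2 != 0 ? (float) (1.0 / Math.sqrt(d2)) : Float.MAX_VALUE;
        Contact c = new Contact();
        c.a = a;
        c.b = b;
        c.weight = 1 - d2 * invD / diameter;           // 1 at full overlap, 0 at exactly one diameter
        c.nx = invD * dx;                              // unit vector from a to b
        c.ny = invD * dy;
        return c;
    }
}

The body above additionally grows its contact array by doubling the capacity whenever the count reaches it, which keeps appends amortized constant time.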


Original Name update,contacts

update

<SENTENCE_START> { for ( int p = 0 ; p < m proxy count ; p ++ ) { proxy proxy = m proxy buffer [ p ] ; int i = proxy . index ; vec 2 pos = m position buffer . data [ i ] ; proxy . tag = compute tag ( m inverse diameter * pos . x , m inverse diameter * pos . y ) ; } arrays . sort ( m proxy buffer , 0 , m proxy count ) ; m contact count = 0 ; int c index = 0 ; for ( int i = 0 ; i < m proxy count ; i ++ ) { proxy a = m proxy buffer [ i ] ; long right tag = compute relative tag ( a . tag , 1 , 0 ) ; for ( int j = i + 1 ; j < m proxy count ; j ++ ) { proxy b = m proxy buffer [ j ] ; if ( right tag < b . tag ) { break ; } add contact ( a . index , b . index ) ; } long bottom left tag = compute relative tag ( a . tag , - 1 , 1 ) ; for ( ; c index < m proxy count ; c index ++ ) { proxy c = m proxy buffer [ c index ] ; if ( bottom left tag <= c . tag ) { break ; } } long bottom right tag = compute relative tag ( a . tag , 1 , 1 ) ; for ( int b index = c index ; b index < m proxy count ; b index ++ ) { proxy b = m proxy buffer [ b index ] ; if ( bottom right tag < b . tag ) { break ; } add contact ( a . index , b . index ) ; } } if ( except zombie ) { int j = m contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( ( m contact buffer [ i ] . flags & particle type . b 2 zombie particle ) != 0 ) { -- j ; particle contact temp = m contact buffer [ j ] ; m contact buffer [ j ] = m contact buffer [ i ] ; m contact buffer [ i ] = temp ; -- i ; } } m contact count = j ; } } <SENTENCE_END/>

(Copy Probability: 3.2%)

<SENTENCE_START> { for ( int p = 0 ; p < m proxy count ; p ++ ) { proxy proxy = m proxy buffer [ p ] ; int i = proxy . index ; vec 2 pos = m position buffer . data [ i ] ; proxy . tag = compute tag ( m inverse diameter * pos . x , m inverse diameter * pos . y ) ; } arrays . sort ( m proxy buffer , 0 , m proxy count ) ; m contact count = 0 ; int c index = 0 ; for ( int i = 0 ; i < m proxy count ; i ++ ) { proxy a = m proxy buffer [ i ] ; long right tag = compute relative tag ( a . tag , 1 , 0 ) ; for ( int j = i + 1 ; j < m proxy count ; j ++ ) { proxy b = m proxy buffer [ j ] ; if ( right tag < b . tag ) { break ; } add contact ( a . index , b . index ) ; } long bottom left tag = compute relative tag ( a . tag , - 1 , 1 ) ; for ( ; c index < m proxy count ; c index ++ ) { proxy c = m proxy buffer [ c index ] ; if ( bottom left tag <= c . tag ) { break ; } } long bottom right tag = compute relative tag ( a . tag , 1 , 1 ) ; for ( int b index = c index ; b index < m proxy count ; b index ++ ) { proxy b = m proxy buffer [ b index ] ; if ( bottom right tag < b . tag ) { break ; } add contact ( a . index , b . index ) ; } } if ( except zombie ) { int j = m contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( ( m contact buffer [ i ] . flags & particle type . b 2 zombie particle ) != 0 ) { -- j ; particle contact temp = m contact buffer [ j ] ; m contact buffer [ j ] = m contact buffer [ i ] ; m contact buffer [ i ] = temp ; -- i ; } } m contact count = j ; } } <SENTENCE_END/>

contacts

<SENTENCE_START> { for ( int p = 0 ; p < m proxy count ; p ++ ) { proxy proxy = m proxy buffer [ p ] ; int i = proxy . index ; vec 2 pos = m position buffer . data [ i ] ; proxy . tag = compute tag ( m inverse diameter * pos . x , m inverse diameter * pos . y ) ; } arrays . sort ( m proxy buffer , 0 , m proxy count ) ; m contact count = 0 ; int c index = 0 ; for ( int i = 0 ; i < m proxy count ; i ++ ) { proxy a = m proxy buffer [ i ] ; long right tag = compute relative tag ( a . tag , 1 , 0 ) ; for ( int j = i + 1 ; j < m proxy count ; j ++ ) { proxy b = m proxy buffer [ j ] ; if ( right tag < b . tag ) { break ; } add contact ( a . index , b . index ) ; } long bottom left tag = compute relative tag ( a . tag , - 1 , 1 ) ; for ( ; c index < m proxy count ; c index ++ ) { proxy c = m proxy buffer [ c index ] ; if ( bottom left tag <= c . tag ) { break ; } } long bottom right tag = compute relative tag ( a . tag , 1 , 1 ) ; for ( int b index = c index ; b index < m proxy count ; b index ++ ) { proxy b = m proxy buffer [ b index ] ; if ( bottom right tag < b . tag ) { break ; } add contact ( a . index , b . index ) ; } } if ( except zombie ) { int j = m contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( ( m contact buffer [ i ] . flags & particle type . b 2 zombie particle ) != 0 ) { -- j ; particle contact temp = m contact buffer [ j ] ; m contact buffer [ j ] = m contact buffer [ i ] ; m contact buffer [ i ] = temp ; -- i ; } } m contact count = j ; } } <SENTENCE_END/>

(Copy Probability: 8.3%)

<SENTENCE_START> { for ( int p = 0 ; p < m proxy count ; p ++ ) { proxy proxy = m proxy buffer [ p ] ; int i = proxy . index ; vec 2 pos = m position buffer . data [ i ] ; proxy . tag = compute tag ( m inverse diameter * pos . x , m inverse diameter * pos . y ) ; } arrays . sort ( m proxy buffer , 0 , m proxy count ) ; m contact count = 0 ; int c index = 0 ; for ( int i = 0 ; i < m proxy count ; i ++ ) { proxy a = m proxy buffer [ i ] ; long right tag = compute relative tag ( a . tag , 1 , 0 ) ; for ( int j = i + 1 ; j < m proxy count ; j ++ ) { proxy b = m proxy buffer [ j ] ; if ( right tag < b . tag ) { break ; } add contact ( a . index , b . index ) ; } long bottom left tag = compute relative tag ( a . tag , - 1 , 1 ) ; for ( ; c index < m proxy count ; c index ++ ) { proxy c = m proxy buffer [ c index ] ; if ( bottom left tag <= c . tag ) { break ; } } long bottom right tag = compute relative tag ( a . tag , 1 , 1 ) ; for ( int b index = c index ; b index < m proxy count ; b index ++ ) { proxy b = m proxy buffer [ b index ] ; if ( bottom right tag < b . tag ) { break ; } add contact ( a . index , b . index ) ; } } if ( except zombie ) { int j = m contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( ( m contact buffer [ i ] . flags & particle type . b 2 zombie particle ) != 0 ) { -- j ; particle contact temp = m contact buffer [ j ] ; m contact buffer [ j ] = m contact buffer [ i ] ; m contact buffer [ i ] = temp ; -- i ; } } m contact count = j ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { for ( int p = 0 ; p < m proxy count ; p ++ ) { proxy proxy = m proxy buffer [ p ] ; int i = proxy . index ; vec 2 pos = m position buffer . data [ i ] ; proxy . tag = compute tag ( m inverse diameter * pos . x , m inverse diameter * pos . y ) ; } arrays . sort ( m proxy buffer , 0 , m proxy count ) ; m contact count = 0 ; int c index = 0 ; for ( int i = 0 ; i < m proxy count ; i ++ ) { proxy a = m proxy buffer [ i ] ; long right tag = compute relative tag ( a . tag , 1 , 0 ) ; for ( int j = i + 1 ; j < m proxy count ; j ++ ) { proxy b = m proxy buffer [ j ] ; if ( right tag < b . tag ) { break ; } add contact ( a . index , b . index ) ; } long bottom left tag = compute relative tag ( a . tag , - 1 , 1 ) ; for ( ; c index < m proxy count ; c index ++ ) { proxy c = m proxy buffer [ c index ] ; if ( bottom left tag <= c . tag ) { break ; } } long bottom right tag = compute relative tag ( a . tag , 1 , 1 ) ; for ( int b index = c index ; b index < m proxy count ; b index ++ ) { proxy b = m proxy buffer [ b index ] ; if ( bottom right tag < b . tag ) { break ; } add contact ( a . index , b . index ) ; } } if ( except zombie ) { int j = m contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( ( m contact buffer [ i ] . flags & particle type . b 2 zombie particle ) != 0 ) { -- j ; particle contact temp = m contact buffer [ j ] ; m contact buffer [ j ] = m contact buffer [ i ] ; m contact buffer [ i ] = temp ; -- i ; } } m contact count = j ; } } <SENTENCE_END/>

(Copy Probability: 4.4%)

<SENTENCE_START> { for ( int p = 0 ; p < m proxy count ; p ++ ) { proxy proxy = m proxy buffer [ p ] ; int i = proxy . index ; vec 2 pos = m position buffer . data [ i ] ; proxy . tag = compute tag ( m inverse diameter * pos . x , m inverse diameter * pos . y ) ; } arrays . sort ( m proxy buffer , 0 , m proxy count ) ; m contact count = 0 ; int c index = 0 ; for ( int i = 0 ; i < m proxy count ; i ++ ) { proxy a = m proxy buffer [ i ] ; long right tag = compute relative tag ( a . tag , 1 , 0 ) ; for ( int j = i + 1 ; j < m proxy count ; j ++ ) { proxy b = m proxy buffer [ j ] ; if ( right tag < b . tag ) { break ; } add contact ( a . index , b . index ) ; } long bottom left tag = compute relative tag ( a . tag , - 1 , 1 ) ; for ( ; c index < m proxy count ; c index ++ ) { proxy c = m proxy buffer [ c index ] ; if ( bottom left tag <= c . tag ) { break ; } } long bottom right tag = compute relative tag ( a . tag , 1 , 1 ) ; for ( int b index = c index ; b index < m proxy count ; b index ++ ) { proxy b = m proxy buffer [ b index ] ; if ( bottom right tag < b . tag ) { break ; } add contact ( a . index , b . index ) ; } } if ( except zombie ) { int j = m contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( ( m contact buffer [ i ] . flags & particle type . b 2 zombie particle ) != 0 ) { -- j ; particle contact temp = m contact buffer [ j ] ; m contact buffer [ j ] = m contact buffer [ i ] ; m contact buffer [ i ] = temp ; -- i ; } } m contact count = j ; } } <SENTENCE_END/>
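
The update-contacts body above sorts proxies by a packed cell tag and scans only the right and lower neighbours of each proxy. As a simplified stand-in for that tag-sort approach, here is a self-contained grid-hash neighbour search that buckets points by cell and checks each point against its own cell and four forward cells; all names here are illustrative, not the jbox2d proxy/tag machinery.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Illustrative only: find all index pairs closer than cellSize using a hash grid.
final class GridNeighborSketch {
    static long cellKey(int cx, int cy) {              // pack (cx, cy) into one long key
        return ((long) cy << 32) ^ (cx & 0xffffffffL);
    }

    static List<int[]> findPairs(float[] x, float[] y, float cellSize) {
        Map<Long, List<Integer>> grid = new HashMap<>();
        for (int i = 0; i < x.length; i++) {
            int cx = (int) Math.floor(x[i] / cellSize);
            int cy = (int) Math.floor(y[i] / cellSize);
            grid.computeIfAbsent(cellKey(cx, cy), k -> new ArrayList<>()).add(i);
        }
        List<int[]> pairs = new ArrayList<>();
        int[][] forward = { {0, 0}, {1, 0}, {-1, 1}, {0, 1}, {1, 1} };   // self + forward cells only
        for (int i = 0; i < x.length; i++) {
            int cx = (int) Math.floor(x[i] / cellSize);
            int cy = (int) Math.floor(y[i] / cellSize);
            for (int[] d : forward) {
                List<Integer> bucket = grid.get(cellKey(cx + d[0], cy + d[1]));
                if (bucket == null) { continue; }
                for (int j : bucket) {
                    if (d[0] == 0 && d[1] == 0 && j <= i) { continue; }  // no self pairs, no same-cell duplicates
                    float dx = x[j] - x[i], dy = y[j] - y[i];
                    if (dx * dx + dy * dy < cellSize * cellSize) {
                        pairs.add(new int[] { i, j });
                    }
                }
            }
        }
        return pairs;
    }
}

Checking only forward cells gives each pair exactly once, which is the same duplicate-avoidance role the right/bottom tag scans play in the body above; the trailing loop that swaps zombie-flagged contacts to the end of the buffer is a separate compaction step.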


Original Name update,body,contacts

update

<SENTENCE_START> { final aabb aabb = temp ; aabb . lower bound . x = float . max value ; aabb . lower bound . y = float . max value ; aabb . upper bound . x = - float . max value ; aabb . upper bound . y = - float . max value ; for ( int i = 0 ; i < m count ; i ++ ) { vec 2 p = m position buffer . data [ i ] ; vec 2 . min to out ( aabb . lower bound , p , aabb . lower bound ) ; vec 2 . max to out ( aabb . upper bound , p , aabb . upper bound ) ; } aabb . lower bound . x -= m particle diameter ; aabb . lower bound . y -= m particle diameter ; aabb . upper bound . x += m particle diameter ; aabb . upper bound . y += m particle diameter ; m body contact count = 0 ; ubccallback . system = this ; m world . query aabb ( ubccallback , aabb ) ; } <SENTENCE_END/>

(Copy Probability: 4.4%)

<SENTENCE_START> { final aabb aabb = temp ; aabb . lower bound . x = float . max value ; aabb . lower bound . y = float . max value ; aabb . upper bound . x = - float . max value ; aabb . upper bound . y = - float . max value ; for ( int i = 0 ; i < m count ; i ++ ) { vec 2 p = m position buffer . data [ i ] ; vec 2 . min to out ( aabb . lower bound , p , aabb . lower bound ) ; vec 2 . max to out ( aabb . upper bound , p , aabb . upper bound ) ; } aabb . lower bound . x -= m particle diameter ; aabb . lower bound . y -= m particle diameter ; aabb . upper bound . x += m particle diameter ; aabb . upper bound . y += m particle diameter ; m body contact count = 0 ; ubccallback . system = this ; m world . query aabb ( ubccallback , aabb ) ; } <SENTENCE_END/>

body

<SENTENCE_START> { final aabb aabb = temp ; aabb . lower bound . x = float . max value ; aabb . lower bound . y = float . max value ; aabb . upper bound . x = - float . max value ; aabb . upper bound . y = - float . max value ; for ( int i = 0 ; i < m count ; i ++ ) { vec 2 p = m position buffer . data [ i ] ; vec 2 . min to out ( aabb . lower bound , p , aabb . lower bound ) ; vec 2 . max to out ( aabb . upper bound , p , aabb . upper bound ) ; } aabb . lower bound . x -= m particle diameter ; aabb . lower bound . y -= m particle diameter ; aabb . upper bound . x += m particle diameter ; aabb . upper bound . y += m particle diameter ; m body contact count = 0 ; ubccallback . system = this ; m world . query aabb ( ubccallback , aabb ) ; } <SENTENCE_END/>

(Copy Probability: 15.8%)

<SENTENCE_START> { final aabb aabb = temp ; aabb . lower bound . x = float . max value ; aabb . lower bound . y = float . max value ; aabb . upper bound . x = - float . max value ; aabb . upper bound . y = - float . max value ; for ( int i = 0 ; i < m count ; i ++ ) { vec 2 p = m position buffer . data [ i ] ; vec 2 . min to out ( aabb . lower bound , p , aabb . lower bound ) ; vec 2 . max to out ( aabb . upper bound , p , aabb . upper bound ) ; } aabb . lower bound . x -= m particle diameter ; aabb . lower bound . y -= m particle diameter ; aabb . upper bound . x += m particle diameter ; aabb . upper bound . y += m particle diameter ; m body contact count = 0 ; ubccallback . system = this ; m world . query aabb ( ubccallback , aabb ) ; } <SENTENCE_END/>

contacts

<SENTENCE_START> { final aabb aabb = temp ; aabb . lower bound . x = float . max value ; aabb . lower bound . y = float . max value ; aabb . upper bound . x = - float . max value ; aabb . upper bound . y = - float . max value ; for ( int i = 0 ; i < m count ; i ++ ) { vec 2 p = m position buffer . data [ i ] ; vec 2 . min to out ( aabb . lower bound , p , aabb . lower bound ) ; vec 2 . max to out ( aabb . upper bound , p , aabb . upper bound ) ; } aabb . lower bound . x -= m particle diameter ; aabb . lower bound . y -= m particle diameter ; aabb . upper bound . x += m particle diameter ; aabb . upper bound . y += m particle diameter ; m body contact count = 0 ; ubccallback . system = this ; m world . query aabb ( ubccallback , aabb ) ; } <SENTENCE_END/>

(Copy Probability: 13.1%)

<SENTENCE_START> { final aabb aabb = temp ; aabb . lower bound . x = float . max value ; aabb . lower bound . y = float . max value ; aabb . upper bound . x = - float . max value ; aabb . upper bound . y = - float . max value ; for ( int i = 0 ; i < m count ; i ++ ) { vec 2 p = m position buffer . data [ i ] ; vec 2 . min to out ( aabb . lower bound , p , aabb . lower bound ) ; vec 2 . max to out ( aabb . upper bound , p , aabb . upper bound ) ; } aabb . lower bound . x -= m particle diameter ; aabb . lower bound . y -= m particle diameter ; aabb . upper bound . x += m particle diameter ; aabb . upper bound . y += m particle diameter ; m body contact count = 0 ; ubccallback . system = this ; m world . query aabb ( ubccallback , aabb ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { final aabb aabb = temp ; aabb . lower bound . x = float . max value ; aabb . lower bound . y = float . max value ; aabb . upper bound . x = - float . max value ; aabb . upper bound . y = - float . max value ; for ( int i = 0 ; i < m count ; i ++ ) { vec 2 p = m position buffer . data [ i ] ; vec 2 . min to out ( aabb . lower bound , p , aabb . lower bound ) ; vec 2 . max to out ( aabb . upper bound , p , aabb . upper bound ) ; } aabb . lower bound . x -= m particle diameter ; aabb . lower bound . y -= m particle diameter ; aabb . upper bound . x += m particle diameter ; aabb . upper bound . y += m particle diameter ; m body contact count = 0 ; ubccallback . system = this ; m world . query aabb ( ubccallback , aabb ) ; } <SENTENCE_END/>

(Copy Probability: 4.8%)

<SENTENCE_START> { final aabb aabb = temp ; aabb . lower bound . x = float . max value ; aabb . lower bound . y = float . max value ; aabb . upper bound . x = - float . max value ; aabb . upper bound . y = - float . max value ; for ( int i = 0 ; i < m count ; i ++ ) { vec 2 p = m position buffer . data [ i ] ; vec 2 . min to out ( aabb . lower bound , p , aabb . lower bound ) ; vec 2 . max to out ( aabb . upper bound , p , aabb . upper bound ) ; } aabb . lower bound . x -= m particle diameter ; aabb . lower bound . y -= m particle diameter ; aabb . upper bound . x += m particle diameter ; aabb . upper bound . y += m particle diameter ; m body contact count = 0 ; ubccallback . system = this ; m world . query aabb ( ubccallback , aabb ) ; } <SENTENCE_END/>
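
The update-body-contacts body above grows an AABB around every particle, pads it by one particle diameter, and hands it to the broad-phase query. A minimal sketch of that bounding-box computation, with illustrative names and a flat float[4] result instead of jbox2d's AABB type:

// Illustrative only: padded axis-aligned bounds over all particle positions.
final class ParticleBoundsSketch {
    // Returns {minX, minY, maxX, maxY}, padded by `diameter` on every side.
    static float[] paddedBounds(float[] x, float[] y, float diameter) {
        float minX = Float.MAX_VALUE, minY = Float.MAX_VALUE;
        float maxX = -Float.MAX_VALUE, maxY = -Float.MAX_VALUE;
        for (int i = 0; i < x.length; i++) {
            minX = Math.min(minX, x[i]);
            minY = Math.min(minY, y[i]);
            maxX = Math.max(maxX, x[i]);
            maxY = Math.max(maxY, y[i]);
        }
        return new float[] { minX - diameter, minY - diameter, maxX + diameter, maxY + diameter };
    }
}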


Original Name solve,collision

solve

<SENTENCE_START> { final aabb aabb = temp ; final vec 2 lower bound = aabb . lower bound ; final vec 2 upper bound = aabb . upper bound ; lower bound . x = float . max value ; lower bound . y = float . max value ; upper bound . x = - float . max value ; upper bound . y = - float . max value ; for ( int i = 0 ; i < m count ; i ++ ) { final vec 2 v = m velocity buffer . data [ i ] ; final vec 2 p 1 = m position buffer . data [ i ] ; final float p 1 x = p 1 . x ; final float p 1 y = p 1 . y ; final float p 2 x = p 1 x + step . dt * v . x ; final float p 2 y = p 1 y + step . dt * v . y ; final float bx = p 1 x < p 2 x ? p 1 x : p 2 x ; final float by = p 1 y < p 2 y ? p 1 y : p 2 y ; lower bound . x = lower bound . x < bx ? lower bound . x : bx ; lower bound . y = lower bound . y < by ? lower bound . y : by ; final float b 1 x = p 1 x > p 2 x ? p 1 x : p 2 x ; final float b 1 y = p 1 y > p 2 y ? p 1 y : p 2 y ; upper bound . x = upper bound . x > b 1 x ? upper bound . x : b 1 x ; upper bound . y = upper bound . y > b 1 y ? upper bound . y : b 1 y ; } sccallback . step = step ; sccallback . system = this ; m world . query aabb ( sccallback , aabb ) ; } <SENTENCE_END/>

(Copy Probability: 3.5%)

<SENTENCE_START> { final aabb aabb = temp ; final vec 2 lower bound = aabb . lower bound ; final vec 2 upper bound = aabb . upper bound ; lower bound . x = float . max value ; lower bound . y = float . max value ; upper bound . x = - float . max value ; upper bound . y = - float . max value ; for ( int i = 0 ; i < m count ; i ++ ) { final vec 2 v = m velocity buffer . data [ i ] ; final vec 2 p 1 = m position buffer . data [ i ] ; final float p 1 x = p 1 . x ; final float p 1 y = p 1 . y ; final float p 2 x = p 1 x + step . dt * v . x ; final float p 2 y = p 1 y + step . dt * v . y ; final float bx = p 1 x < p 2 x ? p 1 x : p 2 x ; final float by = p 1 y < p 2 y ? p 1 y : p 2 y ; lower bound . x = lower bound . x < bx ? lower bound . x : bx ; lower bound . y = lower bound . y < by ? lower bound . y : by ; final float b 1 x = p 1 x > p 2 x ? p 1 x : p 2 x ; final float b 1 y = p 1 y > p 2 y ? p 1 y : p 2 y ; upper bound . x = upper bound . x > b 1 x ? upper bound . x : b 1 x ; upper bound . y = upper bound . y > b 1 y ? upper bound . y : b 1 y ; } sccallback . step = step ; sccallback . system = this ; m world . query aabb ( sccallback , aabb ) ; } <SENTENCE_END/>

collision

<SENTENCE_START> { final aabb aabb = temp ; final vec 2 lower bound = aabb . lower bound ; final vec 2 upper bound = aabb . upper bound ; lower bound . x = float . max value ; lower bound . y = float . max value ; upper bound . x = - float . max value ; upper bound . y = - float . max value ; for ( int i = 0 ; i < m count ; i ++ ) { final vec 2 v = m velocity buffer . data [ i ] ; final vec 2 p 1 = m position buffer . data [ i ] ; final float p 1 x = p 1 . x ; final float p 1 y = p 1 . y ; final float p 2 x = p 1 x + step . dt * v . x ; final float p 2 y = p 1 y + step . dt * v . y ; final float bx = p 1 x < p 2 x ? p 1 x : p 2 x ; final float by = p 1 y < p 2 y ? p 1 y : p 2 y ; lower bound . x = lower bound . x < bx ? lower bound . x : bx ; lower bound . y = lower bound . y < by ? lower bound . y : by ; final float b 1 x = p 1 x > p 2 x ? p 1 x : p 2 x ; final float b 1 y = p 1 y > p 2 y ? p 1 y : p 2 y ; upper bound . x = upper bound . x > b 1 x ? upper bound . x : b 1 x ; upper bound . y = upper bound . y > b 1 y ? upper bound . y : b 1 y ; } sccallback . step = step ; sccallback . system = this ; m world . query aabb ( sccallback , aabb ) ; } <SENTENCE_END/>

(Copy Probability: 15.8%)

<SENTENCE_START> { final aabb aabb = temp ; final vec 2 lower bound = aabb . lower bound ; final vec 2 upper bound = aabb . upper bound ; lower bound . x = float . max value ; lower bound . y = float . max value ; upper bound . x = - float . max value ; upper bound . y = - float . max value ; for ( int i = 0 ; i < m count ; i ++ ) { final vec 2 v = m velocity buffer . data [ i ] ; final vec 2 p 1 = m position buffer . data [ i ] ; final float p 1 x = p 1 . x ; final float p 1 y = p 1 . y ; final float p 2 x = p 1 x + step . dt * v . x ; final float p 2 y = p 1 y + step . dt * v . y ; final float bx = p 1 x < p 2 x ? p 1 x : p 2 x ; final float by = p 1 y < p 2 y ? p 1 y : p 2 y ; lower bound . x = lower bound . x < bx ? lower bound . x : bx ; lower bound . y = lower bound . y < by ? lower bound . y : by ; final float b 1 x = p 1 x > p 2 x ? p 1 x : p 2 x ; final float b 1 y = p 1 y > p 2 y ? p 1 y : p 2 y ; upper bound . x = upper bound . x > b 1 x ? upper bound . x : b 1 x ; upper bound . y = upper bound . y > b 1 y ? upper bound . y : b 1 y ; } sccallback . step = step ; sccallback . system = this ; m world . query aabb ( sccallback , aabb ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { final aabb aabb = temp ; final vec 2 lower bound = aabb . lower bound ; final vec 2 upper bound = aabb . upper bound ; lower bound . x = float . max value ; lower bound . y = float . max value ; upper bound . x = - float . max value ; upper bound . y = - float . max value ; for ( int i = 0 ; i < m count ; i ++ ) { final vec 2 v = m velocity buffer . data [ i ] ; final vec 2 p 1 = m position buffer . data [ i ] ; final float p 1 x = p 1 . x ; final float p 1 y = p 1 . y ; final float p 2 x = p 1 x + step . dt * v . x ; final float p 2 y = p 1 y + step . dt * v . y ; final float bx = p 1 x < p 2 x ? p 1 x : p 2 x ; final float by = p 1 y < p 2 y ? p 1 y : p 2 y ; lower bound . x = lower bound . x < bx ? lower bound . x : bx ; lower bound . y = lower bound . y < by ? lower bound . y : by ; final float b 1 x = p 1 x > p 2 x ? p 1 x : p 2 x ; final float b 1 y = p 1 y > p 2 y ? p 1 y : p 2 y ; upper bound . x = upper bound . x > b 1 x ? upper bound . x : b 1 x ; upper bound . y = upper bound . y > b 1 y ? upper bound . y : b 1 y ; } sccallback . step = step ; sccallback . system = this ; m world . query aabb ( sccallback , aabb ) ; } <SENTENCE_END/>

(Copy Probability: 8.2%)

<SENTENCE_START> { final aabb aabb = temp ; final vec 2 lower bound = aabb . lower bound ; final vec 2 upper bound = aabb . upper bound ; lower bound . x = float . max value ; lower bound . y = float . max value ; upper bound . x = - float . max value ; upper bound . y = - float . max value ; for ( int i = 0 ; i < m count ; i ++ ) { final vec 2 v = m velocity buffer . data [ i ] ; final vec 2 p 1 = m position buffer . data [ i ] ; final float p 1 x = p 1 . x ; final float p 1 y = p 1 . y ; final float p 2 x = p 1 x + step . dt * v . x ; final float p 2 y = p 1 y + step . dt * v . y ; final float bx = p 1 x < p 2 x ? p 1 x : p 2 x ; final float by = p 1 y < p 2 y ? p 1 y : p 2 y ; lower bound . x = lower bound . x < bx ? lower bound . x : bx ; lower bound . y = lower bound . y < by ? lower bound . y : by ; final float b 1 x = p 1 x > p 2 x ? p 1 x : p 2 x ; final float b 1 y = p 1 y > p 2 y ? p 1 y : p 2 y ; upper bound . x = upper bound . x > b 1 x ? upper bound . x : b 1 x ; upper bound . y = upper bound . y > b 1 y ? upper bound . y : b 1 y ; } sccallback . step = step ; sccallback . system = this ; m world . query aabb ( sccallback , aabb ) ; } <SENTENCE_END/>
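
The solve-collision body differs from the previous bound computation in one respect: it bounds each particle's whole motion over the step, i.e. both its current position and its predicted position p + dt * v, so the fixture query covers the swept path. A minimal sketch under the same illustrative naming assumptions:

// Illustrative only: axis-aligned bounds over the segments (p, p + dt * v).
final class SweptBoundsSketch {
    // Returns {minX, minY, maxX, maxY}.
    static float[] sweptBounds(float[] px, float[] py, float[] vx, float[] vy, float dt) {
        float minX = Float.MAX_VALUE, minY = Float.MAX_VALUE;
        float maxX = -Float.MAX_VALUE, maxY = -Float.MAX_VALUE;
        for (int i = 0; i < px.length; i++) {
            float qx = px[i] + dt * vx[i];             // predicted end-of-step position
            float qy = py[i] + dt * vy[i];
            minX = Math.min(minX, Math.min(px[i], qx));
            minY = Math.min(minY, Math.min(py[i], qy));
            maxX = Math.max(maxX, Math.max(px[i], qx));
            maxY = Math.max(maxY, Math.max(py[i], qy));
        }
        return new float[] { minX, minY, maxX, maxY };
    }
}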


Original Name solve

solve

<SENTENCE_START> { ++ m timestamp ; if ( m count == 0 ) { return ; } m all particle flags = 0 ; for ( int i = 0 ; i < m count ; i ++ ) { m all particle flags |= m flags buffer . data [ i ] ; } if ( ( m all particle flags & particle type . b 2 zombie particle ) != 0 ) { solve zombie ( ) ; } if ( m count == 0 ) { return ; } m all group flags = 0 ; for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { m all group flags |= group . m group flags ; } final float gravityx = step . dt * m gravity scale * m world . get gravity ( ) . x ; final float gravityy = step . dt * m gravity scale * m world . get gravity ( ) . y ; float critical velocyty squared = get critical velocity squared ( step ) ; for ( int i = 0 ; i < m count ; i ++ ) { vec 2 v = m velocity buffer . data [ i ] ; v . x += gravityx ; v . y += gravityy ; float v 2 = v . x * v . x + v . y * v . y ; if ( v 2 > critical velocyty squared ) { float a = v 2 == 0 ? float . max value : math utils . sqrt ( critical velocyty squared / v 2 ) ; v . x *= a ; v . y *= a ; } } solve collision ( step ) ; if ( ( m all group flags & particle group type . b 2 rigid particle group ) != 0 ) { solve rigid ( step ) ; } if ( ( m all particle flags & particle type . b 2 wall particle ) != 0 ) { solve wall ( step ) ; } for ( int i = 0 ; i < m count ; i ++ ) { vec 2 pos = m position buffer . data [ i ] ; vec 2 vel = m velocity buffer . data [ i ] ; pos . x += step . dt * vel . x ; pos . y += step . dt * vel . y ; } update body contacts ( ) ; update contacts ( false ) ; if ( ( m all particle flags & particle type . b 2 viscous particle ) != 0 ) { solve viscous ( step ) ; } if ( ( m all particle flags & particle type . b 2 powder particle ) != 0 ) { solve powder ( step ) ; } if ( ( m all particle flags & particle type . b 2 tensile particle ) != 0 ) { solve tensile ( step ) ; } if ( ( m all particle flags & particle type . b 2 elastic particle ) != 0 ) { solve elastic ( step ) ; } if ( ( m all particle flags & particle type . b 2 spring particle ) != 0 ) { solve spring ( step ) ; } if ( ( m all group flags & particle group type . b 2 solid particle group ) != 0 ) { solve solid ( step ) ; } if ( ( m all particle flags & particle type . b 2 color mixing particle ) != 0 ) { solve color mixing ( step ) ; } solve pressure ( step ) ; solve damping ( step ) ; } <SENTENCE_END/>

(Copy Probability: 4.0%)

<SENTENCE_START> { ++ m timestamp ; if ( m count == 0 ) { return ; } m all particle flags = 0 ; for ( int i = 0 ; i < m count ; i ++ ) { m all particle flags |= m flags buffer . data [ i ] ; } if ( ( m all particle flags & particle type . b 2 zombie particle ) != 0 ) { solve zombie ( ) ; } if ( m count == 0 ) { return ; } m all group flags = 0 ; for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { m all group flags |= group . m group flags ; } final float gravityx = step . dt * m gravity scale * m world . get gravity ( ) . x ; final float gravityy = step . dt * m gravity scale * m world . get gravity ( ) . y ; float critical velocyty squared = get critical velocity squared ( step ) ; for ( int i = 0 ; i < m count ; i ++ ) { vec 2 v = m velocity buffer . data [ i ] ; v . x += gravityx ; v . y += gravityy ; float v 2 = v . x * v . x + v . y * v . y ; if ( v 2 > critical velocyty squared ) { float a = v 2 == 0 ? float . max value : math utils . sqrt ( critical velocyty squared / v 2 ) ; v . x *= a ; v . y *= a ; } } solve collision ( step ) ; if ( ( m all group flags & particle group type . b 2 rigid particle group ) != 0 ) { solve rigid ( step ) ; } if ( ( m all particle flags & particle type . b 2 wall particle ) != 0 ) { solve wall ( step ) ; } for ( int i = 0 ; i < m count ; i ++ ) { vec 2 pos = m position buffer . data [ i ] ; vec 2 vel = m velocity buffer . data [ i ] ; pos . x += step . dt * vel . x ; pos . y += step . dt * vel . y ; } update body contacts ( ) ; update contacts ( false ) ; if ( ( m all particle flags & particle type . b 2 viscous particle ) != 0 ) { solve viscous ( step ) ; } if ( ( m all particle flags & particle type . b 2 powder particle ) != 0 ) { solve powder ( step ) ; } if ( ( m all particle flags & particle type . b 2 tensile particle ) != 0 ) { solve tensile ( step ) ; } if ( ( m all particle flags & particle type . b 2 elastic particle ) != 0 ) { solve elastic ( step ) ; } if ( ( m all particle flags & particle type . b 2 spring particle ) != 0 ) { solve spring ( step ) ; } if ( ( m all group flags & particle group type . b 2 solid particle group ) != 0 ) { solve solid ( step ) ; } if ( ( m all particle flags & particle type . b 2 color mixing particle ) != 0 ) { solve color mixing ( step ) ; } solve pressure ( step ) ; solve damping ( step ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { ++ m timestamp ; if ( m count == 0 ) { return ; } m all particle flags = 0 ; for ( int i = 0 ; i < m count ; i ++ ) { m all particle flags |= m flags buffer . data [ i ] ; } if ( ( m all particle flags & particle type . b 2 zombie particle ) != 0 ) { solve zombie ( ) ; } if ( m count == 0 ) { return ; } m all group flags = 0 ; for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { m all group flags |= group . m group flags ; } final float gravityx = step . dt * m gravity scale * m world . get gravity ( ) . x ; final float gravityy = step . dt * m gravity scale * m world . get gravity ( ) . y ; float critical velocyty squared = get critical velocity squared ( step ) ; for ( int i = 0 ; i < m count ; i ++ ) { vec 2 v = m velocity buffer . data [ i ] ; v . x += gravityx ; v . y += gravityy ; float v 2 = v . x * v . x + v . y * v . y ; if ( v 2 > critical velocyty squared ) { float a = v 2 == 0 ? float . max value : math utils . sqrt ( critical velocyty squared / v 2 ) ; v . x *= a ; v . y *= a ; } } solve collision ( step ) ; if ( ( m all group flags & particle group type . b 2 rigid particle group ) != 0 ) { solve rigid ( step ) ; } if ( ( m all particle flags & particle type . b 2 wall particle ) != 0 ) { solve wall ( step ) ; } for ( int i = 0 ; i < m count ; i ++ ) { vec 2 pos = m position buffer . data [ i ] ; vec 2 vel = m velocity buffer . data [ i ] ; pos . x += step . dt * vel . x ; pos . y += step . dt * vel . y ; } update body contacts ( ) ; update contacts ( false ) ; if ( ( m all particle flags & particle type . b 2 viscous particle ) != 0 ) { solve viscous ( step ) ; } if ( ( m all particle flags & particle type . b 2 powder particle ) != 0 ) { solve powder ( step ) ; } if ( ( m all particle flags & particle type . b 2 tensile particle ) != 0 ) { solve tensile ( step ) ; } if ( ( m all particle flags & particle type . b 2 elastic particle ) != 0 ) { solve elastic ( step ) ; } if ( ( m all particle flags & particle type . b 2 spring particle ) != 0 ) { solve spring ( step ) ; } if ( ( m all group flags & particle group type . b 2 solid particle group ) != 0 ) { solve solid ( step ) ; } if ( ( m all particle flags & particle type . b 2 color mixing particle ) != 0 ) { solve color mixing ( step ) ; } solve pressure ( step ) ; solve damping ( step ) ; } <SENTENCE_END/>

(Copy Probability: 9.3%)

<SENTENCE_START> { ++ m timestamp ; if ( m count == 0 ) { return ; } m all particle flags = 0 ; for ( int i = 0 ; i < m count ; i ++ ) { m all particle flags |= m flags buffer . data [ i ] ; } if ( ( m all particle flags & particle type . b 2 zombie particle ) != 0 ) { solve zombie ( ) ; } if ( m count == 0 ) { return ; } m all group flags = 0 ; for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { m all group flags |= group . m group flags ; } final float gravityx = step . dt * m gravity scale * m world . get gravity ( ) . x ; final float gravityy = step . dt * m gravity scale * m world . get gravity ( ) . y ; float critical velocyty squared = get critical velocity squared ( step ) ; for ( int i = 0 ; i < m count ; i ++ ) { vec 2 v = m velocity buffer . data [ i ] ; v . x += gravityx ; v . y += gravityy ; float v 2 = v . x * v . x + v . y * v . y ; if ( v 2 > critical velocyty squared ) { float a = v 2 == 0 ? float . max value : math utils . sqrt ( critical velocyty squared / v 2 ) ; v . x *= a ; v . y *= a ; } } solve collision ( step ) ; if ( ( m all group flags & particle group type . b 2 rigid particle group ) != 0 ) { solve rigid ( step ) ; } if ( ( m all particle flags & particle type . b 2 wall particle ) != 0 ) { solve wall ( step ) ; } for ( int i = 0 ; i < m count ; i ++ ) { vec 2 pos = m position buffer . data [ i ] ; vec 2 vel = m velocity buffer . data [ i ] ; pos . x += step . dt * vel . x ; pos . y += step . dt * vel . y ; } update body contacts ( ) ; update contacts ( false ) ; if ( ( m all particle flags & particle type . b 2 viscous particle ) != 0 ) { solve viscous ( step ) ; } if ( ( m all particle flags & particle type . b 2 powder particle ) != 0 ) { solve powder ( step ) ; } if ( ( m all particle flags & particle type . b 2 tensile particle ) != 0 ) { solve tensile ( step ) ; } if ( ( m all particle flags & particle type . b 2 elastic particle ) != 0 ) { solve elastic ( step ) ; } if ( ( m all particle flags & particle type . b 2 spring particle ) != 0 ) { solve spring ( step ) ; } if ( ( m all group flags & particle group type . b 2 solid particle group ) != 0 ) { solve solid ( step ) ; } if ( ( m all particle flags & particle type . b 2 color mixing particle ) != 0 ) { solve color mixing ( step ) ; } solve pressure ( step ) ; solve damping ( step ) ; } <SENTENCE_END/>
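
The body above is the particle system's per-step solve routine: it ORs all particle and group flags, removes zombie particles, applies gravity scaled by dt, clamps each velocity to the critical velocity, runs the collision, rigid and wall passes, integrates positions, refreshes contacts, dispatches the per-flag solvers (viscous, powder, tensile, elastic, spring, solid, color mixing), and finishes with the pressure and damping passes. Below is a minimal, self-contained sketch of the gravity-plus-clamp loop only, using plain float arrays in place of the Vec2 velocity buffer; the names (applyGravityAndClamp, criticalVelocitySquared) are illustrative, not the library's.

// Sketch: apply scaled gravity, then clamp speeds to the critical velocity.
public final class GravityClampSketch {
    static void applyGravityAndClamp(float[] velX, float[] velY, int count,
                                     float dt, float gravityScale,
                                     float gx, float gy,
                                     float criticalVelocitySquared) {
        float gravityX = dt * gravityScale * gx;
        float gravityY = dt * gravityScale * gy;
        for (int i = 0; i < count; i++) {
            velX[i] += gravityX;
            velY[i] += gravityY;
            float v2 = velX[i] * velX[i] + velY[i] * velY[i];
            if (v2 > criticalVelocitySquared) {
                // Scale the velocity back onto the critical-velocity circle.
                float a = v2 == 0 ? Float.MAX_VALUE
                                  : (float) Math.sqrt(criticalVelocitySquared / v2);
                velX[i] *= a;
                velY[i] *= a;
            }
        }
    }

    public static void main(String[] args) {
        float[] vx = {0f, 3f}, vy = {0f, 4f};
        applyGravityAndClamp(vx, vy, 2, 1f / 60f, 1f, 0f, -10f, 4f);
        System.out.println(vx[1] + ", " + vy[1]); // second particle clamped to speed ~2
    }
}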


Original Name solve,pressure

solve

<SENTENCE_START> { for ( int i = 0 ; i < m count ; i ++ ) { m accumulation buffer [ i ] = 0 ; } for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; float w = contact . weight ; m accumulation buffer [ a ] += w ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; } if ( ( m all particle flags & k no pressure flags ) != 0 ) { for ( int i = 0 ; i < m count ; i ++ ) { if ( ( m flags buffer . data [ i ] & k no pressure flags ) != 0 ) { m accumulation buffer [ i ] = 0 ; } } } float pressure per weight = m pressure strength * get critical pressure ( step ) ; for ( int i = 0 ; i < m count ; i ++ ) { float w = m accumulation buffer [ i ] ; float h = pressure per weight * math utils . max ( 0.0f , math utils . min ( w , settings . max particle weight ) - settings . min particle weight ) ; m accumulation buffer [ i ] = h ; } float velocity per pressure = step . dt / ( m density * m particle diameter ) ; for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 n = contact . normal ; vec 2 p = m position buffer . data [ a ] ; float h = m accumulation buffer [ a ] + pressure per weight * w ; final vec 2 f = temp vec ; final float coef = velocity per pressure * w * m * h ; f . x = coef * n . x ; f . y = coef * n . y ; final vec 2 vel data = m velocity buffer . data [ a ] ; final float particle inv mass = get particle inv mass ( ) ; vel data . x -= particle inv mass * f . x ; vel data . y -= particle inv mass * f . y ; b . apply linear impulse ( f , p , true ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; float h = m accumulation buffer [ a ] + m accumulation buffer [ b ] ; final float fx = velocity per pressure * w * h * n . x ; final float fy = velocity per pressure * w * h * n . y ; final vec 2 vel data a = m velocity buffer . data [ a ] ; final vec 2 vel data b = m velocity buffer . data [ b ] ; vel data a . x -= fx ; vel data a . y -= fy ; vel data b . x += fx ; vel data b . y += fy ; } } <SENTENCE_END/>

(Copy Probability: 3.3%)

<SENTENCE_START> { for ( int i = 0 ; i < m count ; i ++ ) { m accumulation buffer [ i ] = 0 ; } for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; float w = contact . weight ; m accumulation buffer [ a ] += w ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; } if ( ( m all particle flags & k no pressure flags ) != 0 ) { for ( int i = 0 ; i < m count ; i ++ ) { if ( ( m flags buffer . data [ i ] & k no pressure flags ) != 0 ) { m accumulation buffer [ i ] = 0 ; } } } float pressure per weight = m pressure strength * get critical pressure ( step ) ; for ( int i = 0 ; i < m count ; i ++ ) { float w = m accumulation buffer [ i ] ; float h = pressure per weight * math utils . max ( 0.0f , math utils . min ( w , settings . max particle weight ) - settings . min particle weight ) ; m accumulation buffer [ i ] = h ; } float velocity per pressure = step . dt / ( m density * m particle diameter ) ; for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 n = contact . normal ; vec 2 p = m position buffer . data [ a ] ; float h = m accumulation buffer [ a ] + pressure per weight * w ; final vec 2 f = temp vec ; final float coef = velocity per pressure * w * m * h ; f . x = coef * n . x ; f . y = coef * n . y ; final vec 2 vel data = m velocity buffer . data [ a ] ; final float particle inv mass = get particle inv mass ( ) ; vel data . x -= particle inv mass * f . x ; vel data . y -= particle inv mass * f . y ; b . apply linear impulse ( f , p , true ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; float h = m accumulation buffer [ a ] + m accumulation buffer [ b ] ; final float fx = velocity per pressure * w * h * n . x ; final float fy = velocity per pressure * w * h * n . y ; final vec 2 vel data a = m velocity buffer . data [ a ] ; final vec 2 vel data b = m velocity buffer . data [ b ] ; vel data a . x -= fx ; vel data a . y -= fy ; vel data b . x += fx ; vel data b . y += fy ; } } <SENTENCE_END/>

pressure

<SENTENCE_START> { for ( int i = 0 ; i < m count ; i ++ ) { m accumulation buffer [ i ] = 0 ; } for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; float w = contact . weight ; m accumulation buffer [ a ] += w ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; } if ( ( m all particle flags & k no pressure flags ) != 0 ) { for ( int i = 0 ; i < m count ; i ++ ) { if ( ( m flags buffer . data [ i ] & k no pressure flags ) != 0 ) { m accumulation buffer [ i ] = 0 ; } } } float pressure per weight = m pressure strength * get critical pressure ( step ) ; for ( int i = 0 ; i < m count ; i ++ ) { float w = m accumulation buffer [ i ] ; float h = pressure per weight * math utils . max ( 0.0f , math utils . min ( w , settings . max particle weight ) - settings . min particle weight ) ; m accumulation buffer [ i ] = h ; } float velocity per pressure = step . dt / ( m density * m particle diameter ) ; for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 n = contact . normal ; vec 2 p = m position buffer . data [ a ] ; float h = m accumulation buffer [ a ] + pressure per weight * w ; final vec 2 f = temp vec ; final float coef = velocity per pressure * w * m * h ; f . x = coef * n . x ; f . y = coef * n . y ; final vec 2 vel data = m velocity buffer . data [ a ] ; final float particle inv mass = get particle inv mass ( ) ; vel data . x -= particle inv mass * f . x ; vel data . y -= particle inv mass * f . y ; b . apply linear impulse ( f , p , true ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; float h = m accumulation buffer [ a ] + m accumulation buffer [ b ] ; final float fx = velocity per pressure * w * h * n . x ; final float fy = velocity per pressure * w * h * n . y ; final vec 2 vel data a = m velocity buffer . data [ a ] ; final vec 2 vel data b = m velocity buffer . data [ b ] ; vel data a . x -= fx ; vel data a . y -= fy ; vel data b . x += fx ; vel data b . y += fy ; } } <SENTENCE_END/>

(Copy Probability: 9.0%)

<SENTENCE_START> { for ( int i = 0 ; i < m count ; i ++ ) { m accumulation buffer [ i ] = 0 ; } for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; float w = contact . weight ; m accumulation buffer [ a ] += w ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; } if ( ( m all particle flags & k no pressure flags ) != 0 ) { for ( int i = 0 ; i < m count ; i ++ ) { if ( ( m flags buffer . data [ i ] & k no pressure flags ) != 0 ) { m accumulation buffer [ i ] = 0 ; } } } float pressure per weight = m pressure strength * get critical pressure ( step ) ; for ( int i = 0 ; i < m count ; i ++ ) { float w = m accumulation buffer [ i ] ; float h = pressure per weight * math utils . max ( 0.0f , math utils . min ( w , settings . max particle weight ) - settings . min particle weight ) ; m accumulation buffer [ i ] = h ; } float velocity per pressure = step . dt / ( m density * m particle diameter ) ; for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 n = contact . normal ; vec 2 p = m position buffer . data [ a ] ; float h = m accumulation buffer [ a ] + pressure per weight * w ; final vec 2 f = temp vec ; final float coef = velocity per pressure * w * m * h ; f . x = coef * n . x ; f . y = coef * n . y ; final vec 2 vel data = m velocity buffer . data [ a ] ; final float particle inv mass = get particle inv mass ( ) ; vel data . x -= particle inv mass * f . x ; vel data . y -= particle inv mass * f . y ; b . apply linear impulse ( f , p , true ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; float h = m accumulation buffer [ a ] + m accumulation buffer [ b ] ; final float fx = velocity per pressure * w * h * n . x ; final float fy = velocity per pressure * w * h * n . y ; final vec 2 vel data a = m velocity buffer . data [ a ] ; final vec 2 vel data b = m velocity buffer . data [ b ] ; vel data a . x -= fx ; vel data a . y -= fy ; vel data b . x += fx ; vel data b . y += fy ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { for ( int i = 0 ; i < m count ; i ++ ) { m accumulation buffer [ i ] = 0 ; } for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; float w = contact . weight ; m accumulation buffer [ a ] += w ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; } if ( ( m all particle flags & k no pressure flags ) != 0 ) { for ( int i = 0 ; i < m count ; i ++ ) { if ( ( m flags buffer . data [ i ] & k no pressure flags ) != 0 ) { m accumulation buffer [ i ] = 0 ; } } } float pressure per weight = m pressure strength * get critical pressure ( step ) ; for ( int i = 0 ; i < m count ; i ++ ) { float w = m accumulation buffer [ i ] ; float h = pressure per weight * math utils . max ( 0.0f , math utils . min ( w , settings . max particle weight ) - settings . min particle weight ) ; m accumulation buffer [ i ] = h ; } float velocity per pressure = step . dt / ( m density * m particle diameter ) ; for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 n = contact . normal ; vec 2 p = m position buffer . data [ a ] ; float h = m accumulation buffer [ a ] + pressure per weight * w ; final vec 2 f = temp vec ; final float coef = velocity per pressure * w * m * h ; f . x = coef * n . x ; f . y = coef * n . y ; final vec 2 vel data = m velocity buffer . data [ a ] ; final float particle inv mass = get particle inv mass ( ) ; vel data . x -= particle inv mass * f . x ; vel data . y -= particle inv mass * f . y ; b . apply linear impulse ( f , p , true ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; float h = m accumulation buffer [ a ] + m accumulation buffer [ b ] ; final float fx = velocity per pressure * w * h * n . x ; final float fy = velocity per pressure * w * h * n . y ; final vec 2 vel data a = m velocity buffer . data [ a ] ; final vec 2 vel data b = m velocity buffer . data [ b ] ; vel data a . x -= fx ; vel data a . y -= fy ; vel data b . x += fx ; vel data b . y += fy ; } } <SENTENCE_END/>

(Copy Probability: 8.8%)

<SENTENCE_START> { for ( int i = 0 ; i < m count ; i ++ ) { m accumulation buffer [ i ] = 0 ; } for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; float w = contact . weight ; m accumulation buffer [ a ] += w ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; } if ( ( m all particle flags & k no pressure flags ) != 0 ) { for ( int i = 0 ; i < m count ; i ++ ) { if ( ( m flags buffer . data [ i ] & k no pressure flags ) != 0 ) { m accumulation buffer [ i ] = 0 ; } } } float pressure per weight = m pressure strength * get critical pressure ( step ) ; for ( int i = 0 ; i < m count ; i ++ ) { float w = m accumulation buffer [ i ] ; float h = pressure per weight * math utils . max ( 0.0f , math utils . min ( w , settings . max particle weight ) - settings . min particle weight ) ; m accumulation buffer [ i ] = h ; } float velocity per pressure = step . dt / ( m density * m particle diameter ) ; for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 n = contact . normal ; vec 2 p = m position buffer . data [ a ] ; float h = m accumulation buffer [ a ] + pressure per weight * w ; final vec 2 f = temp vec ; final float coef = velocity per pressure * w * m * h ; f . x = coef * n . x ; f . y = coef * n . y ; final vec 2 vel data = m velocity buffer . data [ a ] ; final float particle inv mass = get particle inv mass ( ) ; vel data . x -= particle inv mass * f . x ; vel data . y -= particle inv mass * f . y ; b . apply linear impulse ( f , p , true ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; float h = m accumulation buffer [ a ] + m accumulation buffer [ b ] ; final float fx = velocity per pressure * w * h * n . x ; final float fy = velocity per pressure * w * h * n . y ; final vec 2 vel data a = m velocity buffer . data [ a ] ; final vec 2 vel data b = m velocity buffer . data [ b ] ; vel data a . x -= fx ; vel data a . y -= fy ; vel data b . x += fx ; vel data b . y += fy ; } } <SENTENCE_END/>
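
The solvePressure body accumulates contact weights per particle, converts each clamped weight into a pressure value, and then exchanges velocity between the two sides of every contact along the contact normal (for body contacts, via applyLinearImpulse on the body). A simplified, self-contained sketch of the particle-particle half, with arrays standing in for the accumulation and velocity buffers; MIN_WEIGHT and MAX_WEIGHT are illustrative stand-ins for the settings constants.

// Sketch of the particle-particle pressure pass: weights -> pressure -> impulses.
public final class PressureSketch {
    static final float MIN_WEIGHT = 1.0f; // illustrative stand-in for the minimum particle weight
    static final float MAX_WEIGHT = 5.0f; // illustrative stand-in for the maximum particle weight

    static void solvePressure(int[] contactA, int[] contactB, float[] contactW,
                              float[] normalX, float[] normalY, int contactCount,
                              float[] velX, float[] velY, int particleCount,
                              float pressurePerWeight, float velocityPerPressure) {
        float[] accumulation = new float[particleCount];
        // 1. Accumulate contact weights per particle.
        for (int k = 0; k < contactCount; k++) {
            accumulation[contactA[k]] += contactW[k];
            accumulation[contactB[k]] += contactW[k];
        }
        // 2. Convert each clamped weight into a pressure value.
        for (int i = 0; i < particleCount; i++) {
            float w = accumulation[i];
            accumulation[i] = pressurePerWeight
                    * Math.max(0.0f, Math.min(w, MAX_WEIGHT) - MIN_WEIGHT);
        }
        // 3. Apply equal and opposite velocity changes along each contact normal.
        for (int k = 0; k < contactCount; k++) {
            int a = contactA[k], b = contactB[k];
            float h = accumulation[a] + accumulation[b];
            float fx = velocityPerPressure * contactW[k] * h * normalX[k];
            float fy = velocityPerPressure * contactW[k] * h * normalY[k];
            velX[a] -= fx; velY[a] -= fy;
            velX[b] += fx; velY[b] += fy;
        }
    }
}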


Original Name solve,damping

solve

<SENTENCE_START> { float damping = m damping strength ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 n = contact . normal ; vec 2 p = m position buffer . data [ a ] ; final float temp x = p . x - b . m sweep . c . x ; final float temp y = p . y - b . m sweep . c . y ; final vec 2 vel a = m velocity buffer . data [ a ] ; float vx = - b . m angular velocity * temp y + b . m linear velocity . x - vel a . x ; float vy = b . m angular velocity * temp x + b . m linear velocity . y - vel a . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { final vec 2 f = temp vec ; f . x = damping * w * m * vn * n . x ; f . y = damping * w * m * vn * n . y ; final float inv mass = get particle inv mass ( ) ; vel a . x += inv mass * f . x ; vel a . y += inv mass * f . y ; f . x = - f . x ; f . y = - f . y ; b . apply linear impulse ( f , p , true ) ; } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; final vec 2 vel a = m velocity buffer . data [ a ] ; final vec 2 vel b = m velocity buffer . data [ b ] ; final float vx = vel b . x - vel a . x ; final float vy = vel b . y - vel a . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { float fx = damping * w * vn * n . x ; float fy = damping * w * vn * n . y ; vel a . x += fx ; vel a . y += fy ; vel b . x -= fx ; vel b . y -= fy ; } } } <SENTENCE_END/>

(Copy Probability: 3.5%)

<SENTENCE_START> { float damping = m damping strength ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 n = contact . normal ; vec 2 p = m position buffer . data [ a ] ; final float temp x = p . x - b . m sweep . c . x ; final float temp y = p . y - b . m sweep . c . y ; final vec 2 vel a = m velocity buffer . data [ a ] ; float vx = - b . m angular velocity * temp y + b . m linear velocity . x - vel a . x ; float vy = b . m angular velocity * temp x + b . m linear velocity . y - vel a . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { final vec 2 f = temp vec ; f . x = damping * w * m * vn * n . x ; f . y = damping * w * m * vn * n . y ; final float inv mass = get particle inv mass ( ) ; vel a . x += inv mass * f . x ; vel a . y += inv mass * f . y ; f . x = - f . x ; f . y = - f . y ; b . apply linear impulse ( f , p , true ) ; } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; final vec 2 vel a = m velocity buffer . data [ a ] ; final vec 2 vel b = m velocity buffer . data [ b ] ; final float vx = vel b . x - vel a . x ; final float vy = vel b . y - vel a . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { float fx = damping * w * vn * n . x ; float fy = damping * w * vn * n . y ; vel a . x += fx ; vel a . y += fy ; vel b . x -= fx ; vel b . y -= fy ; } } } <SENTENCE_END/>

damping

<SENTENCE_START> { float damping = m damping strength ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 n = contact . normal ; vec 2 p = m position buffer . data [ a ] ; final float temp x = p . x - b . m sweep . c . x ; final float temp y = p . y - b . m sweep . c . y ; final vec 2 vel a = m velocity buffer . data [ a ] ; float vx = - b . m angular velocity * temp y + b . m linear velocity . x - vel a . x ; float vy = b . m angular velocity * temp x + b . m linear velocity . y - vel a . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { final vec 2 f = temp vec ; f . x = damping * w * m * vn * n . x ; f . y = damping * w * m * vn * n . y ; final float inv mass = get particle inv mass ( ) ; vel a . x += inv mass * f . x ; vel a . y += inv mass * f . y ; f . x = - f . x ; f . y = - f . y ; b . apply linear impulse ( f , p , true ) ; } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; final vec 2 vel a = m velocity buffer . data [ a ] ; final vec 2 vel b = m velocity buffer . data [ b ] ; final float vx = vel b . x - vel a . x ; final float vy = vel b . y - vel a . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { float fx = damping * w * vn * n . x ; float fy = damping * w * vn * n . y ; vel a . x += fx ; vel a . y += fy ; vel b . x -= fx ; vel b . y -= fy ; } } } <SENTENCE_END/>

(Copy Probability: 9.5%)

<SENTENCE_START> { float damping = m damping strength ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 n = contact . normal ; vec 2 p = m position buffer . data [ a ] ; final float temp x = p . x - b . m sweep . c . x ; final float temp y = p . y - b . m sweep . c . y ; final vec 2 vel a = m velocity buffer . data [ a ] ; float vx = - b . m angular velocity * temp y + b . m linear velocity . x - vel a . x ; float vy = b . m angular velocity * temp x + b . m linear velocity . y - vel a . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { final vec 2 f = temp vec ; f . x = damping * w * m * vn * n . x ; f . y = damping * w * m * vn * n . y ; final float inv mass = get particle inv mass ( ) ; vel a . x += inv mass * f . x ; vel a . y += inv mass * f . y ; f . x = - f . x ; f . y = - f . y ; b . apply linear impulse ( f , p , true ) ; } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; final vec 2 vel a = m velocity buffer . data [ a ] ; final vec 2 vel b = m velocity buffer . data [ b ] ; final float vx = vel b . x - vel a . x ; final float vy = vel b . y - vel a . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { float fx = damping * w * vn * n . x ; float fy = damping * w * vn * n . y ; vel a . x += fx ; vel a . y += fy ; vel b . x -= fx ; vel b . y -= fy ; } } } <SENTENCE_END/>

%END%

<SENTENCE_START> { float damping = m damping strength ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 n = contact . normal ; vec 2 p = m position buffer . data [ a ] ; final float temp x = p . x - b . m sweep . c . x ; final float temp y = p . y - b . m sweep . c . y ; final vec 2 vel a = m velocity buffer . data [ a ] ; float vx = - b . m angular velocity * temp y + b . m linear velocity . x - vel a . x ; float vy = b . m angular velocity * temp x + b . m linear velocity . y - vel a . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { final vec 2 f = temp vec ; f . x = damping * w * m * vn * n . x ; f . y = damping * w * m * vn * n . y ; final float inv mass = get particle inv mass ( ) ; vel a . x += inv mass * f . x ; vel a . y += inv mass * f . y ; f . x = - f . x ; f . y = - f . y ; b . apply linear impulse ( f , p , true ) ; } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; final vec 2 vel a = m velocity buffer . data [ a ] ; final vec 2 vel b = m velocity buffer . data [ b ] ; final float vx = vel b . x - vel a . x ; final float vy = vel b . y - vel a . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { float fx = damping * w * vn * n . x ; float fy = damping * w * vn * n . y ; vel a . x += fx ; vel a . y += fy ; vel b . x -= fx ; vel b . y -= fy ; } } } <SENTENCE_END/>

(Copy Probability: 13.8%)

<SENTENCE_START> { float damping = m damping strength ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 n = contact . normal ; vec 2 p = m position buffer . data [ a ] ; final float temp x = p . x - b . m sweep . c . x ; final float temp y = p . y - b . m sweep . c . y ; final vec 2 vel a = m velocity buffer . data [ a ] ; float vx = - b . m angular velocity * temp y + b . m linear velocity . x - vel a . x ; float vy = b . m angular velocity * temp x + b . m linear velocity . y - vel a . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { final vec 2 f = temp vec ; f . x = damping * w * m * vn * n . x ; f . y = damping * w * m * vn * n . y ; final float inv mass = get particle inv mass ( ) ; vel a . x += inv mass * f . x ; vel a . y += inv mass * f . y ; f . x = - f . x ; f . y = - f . y ; b . apply linear impulse ( f , p , true ) ; } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; final vec 2 vel a = m velocity buffer . data [ a ] ; final vec 2 vel b = m velocity buffer . data [ b ] ; final float vx = vel b . x - vel a . x ; final float vy = vel b . y - vel a . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { float fx = damping * w * vn * n . x ; float fy = damping * w * vn * n . y ; vel a . x += fx ; vel a . y += fy ; vel b . x -= fx ; vel b . y -= fy ; } } } <SENTENCE_END/>
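
solveDamping only acts on contacts whose relative normal velocity vn is negative, i.e. the two sides are approaching; the impulse removes a fraction of that approach velocity, symmetrically for particle pairs and through applyLinearImpulse for body contacts. A self-contained sketch of the particle-particle case:

// Sketch of particle-particle damping: remove part of the approach velocity.
public final class DampingSketch {
    static void solveDamping(int[] contactA, int[] contactB, float[] contactW,
                             float[] normalX, float[] normalY, int contactCount,
                             float[] velX, float[] velY, float dampingStrength) {
        for (int k = 0; k < contactCount; k++) {
            int a = contactA[k], b = contactB[k];
            // Relative velocity of b with respect to a, projected on the contact normal.
            float vx = velX[b] - velX[a];
            float vy = velY[b] - velY[a];
            float vn = vx * normalX[k] + vy * normalY[k];
            if (vn < 0) { // only damp pairs that are approaching
                float fx = dampingStrength * contactW[k] * vn * normalX[k];
                float fy = dampingStrength * contactW[k] * vn * normalY[k];
                velX[a] += fx; velY[a] += fy;
                velX[b] -= fx; velY[b] -= fy;
            }
        }
    }
}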


Original Name solve,wall

solve

<SENTENCE_START> { for ( int i = 0 ; i < m count ; i ++ ) { if ( ( m flags buffer . data [ i ] & particle type . b 2 wall particle ) != 0 ) { final vec 2 r = m velocity buffer . data [ i ] ; r . x = 0.0f ; r . y = 0.0f ; } } } <SENTENCE_END/>

(Copy Probability: 3.4%)

<SENTENCE_START> { for ( int i = 0 ; i < m count ; i ++ ) { if ( ( m flags buffer . data [ i ] & particle type . b 2 wall particle ) != 0 ) { final vec 2 r = m velocity buffer . data [ i ] ; r . x = 0.0f ; r . y = 0.0f ; } } } <SENTENCE_END/>

wall

<SENTENCE_START> { for ( int i = 0 ; i < m count ; i ++ ) { if ( ( m flags buffer . data [ i ] & particle type . b 2 wall particle ) != 0 ) { final vec 2 r = m velocity buffer . data [ i ] ; r . x = 0.0f ; r . y = 0.0f ; } } } <SENTENCE_END/>

(Copy Probability: 48.8%)

<SENTENCE_START> { for ( int i = 0 ; i < m count ; i ++ ) { if ( ( m flags buffer . data [ i ] & particle type . b 2 wall particle ) != 0 ) { final vec 2 r = m velocity buffer . data [ i ] ; r . x = 0.0f ; r . y = 0.0f ; } } } <SENTENCE_END/>

%END%

<SENTENCE_START> { for ( int i = 0 ; i < m count ; i ++ ) { if ( ( m flags buffer . data [ i ] & particle type . b 2 wall particle ) != 0 ) { final vec 2 r = m velocity buffer . data [ i ] ; r . x = 0.0f ; r . y = 0.0f ; } } } <SENTENCE_END/>

(Copy Probability: 9.1%)

<SENTENCE_START> { for ( int i = 0 ; i < m count ; i ++ ) { if ( ( m flags buffer . data [ i ] & particle type . b 2 wall particle ) != 0 ) { final vec 2 r = m velocity buffer . data [ i ] ; r . x = 0.0f ; r . y = 0.0f ; } } } <SENTENCE_END/>
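
solveWall is the simplest pass: every particle carrying the wall flag gets its velocity zeroed so it stays fixed in place. A sketch with a plain bit mask (the flag value here is illustrative, not the library's constant):

// Sketch: zero the velocity of every wall-flagged particle.
public final class WallSketch {
    static final int WALL_PARTICLE = 1 << 2; // illustrative flag bit

    static void solveWall(int[] flags, float[] velX, float[] velY, int count) {
        for (int i = 0; i < count; i++) {
            if ((flags[i] & WALL_PARTICLE) != 0) {
                velX[i] = 0.0f;
                velY[i] = 0.0f;
            }
        }
    }

    public static void main(String[] args) {
        int[] flags = {0, WALL_PARTICLE};
        float[] vx = {1f, 1f}, vy = {2f, 2f};
        solveWall(flags, vx, vy, 2);
        System.out.println(vx[1] + ", " + vy[1]); // 0.0, 0.0
    }
}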


Original Name solve,rigid

solve

<SENTENCE_START> { for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { if ( ( group . m group flags & particle group type . b 2 rigid particle group ) != 0 ) { group . update statistics ( ) ; vec 2 temp = temp vec ; vec 2 cross = temp vec 2 ; rot rotation = temp rot ; rotation . set ( step . dt * group . m angular velocity ) ; rot . mul to out unsafe ( rotation , group . m center , cross ) ; temp . set ( group . m linear velocity ) . mul local ( step . dt ) . add local ( group . m center ) . sub local ( cross ) ; temp xf . p . set ( temp ) ; temp xf . q . set ( rotation ) ; transform . mul to out ( temp xf , group . m transform , group . m transform ) ; final transform velocity transform = temp xf 2 ; velocity transform . p . x = step . inv dt * temp xf . p . x ; velocity transform . p . y = step . inv dt * temp xf . p . y ; velocity transform . q . s = step . inv dt * temp xf . q . s ; velocity transform . q . c = step . inv dt * ( temp xf . q . c - 1 ) ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { transform . mul to out unsafe ( velocity transform , m position buffer . data [ i ] , m velocity buffer . data [ i ] ) ; } } } } <SENTENCE_END/>

(Copy Probability: 3.4%)

<SENTENCE_START> { for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { if ( ( group . m group flags & particle group type . b 2 rigid particle group ) != 0 ) { group . update statistics ( ) ; vec 2 temp = temp vec ; vec 2 cross = temp vec 2 ; rot rotation = temp rot ; rotation . set ( step . dt * group . m angular velocity ) ; rot . mul to out unsafe ( rotation , group . m center , cross ) ; temp . set ( group . m linear velocity ) . mul local ( step . dt ) . add local ( group . m center ) . sub local ( cross ) ; temp xf . p . set ( temp ) ; temp xf . q . set ( rotation ) ; transform . mul to out ( temp xf , group . m transform , group . m transform ) ; final transform velocity transform = temp xf 2 ; velocity transform . p . x = step . inv dt * temp xf . p . x ; velocity transform . p . y = step . inv dt * temp xf . p . y ; velocity transform . q . s = step . inv dt * temp xf . q . s ; velocity transform . q . c = step . inv dt * ( temp xf . q . c - 1 ) ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { transform . mul to out unsafe ( velocity transform , m position buffer . data [ i ] , m velocity buffer . data [ i ] ) ; } } } } <SENTENCE_END/>

rigid

<SENTENCE_START> { for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { if ( ( group . m group flags & particle group type . b 2 rigid particle group ) != 0 ) { group . update statistics ( ) ; vec 2 temp = temp vec ; vec 2 cross = temp vec 2 ; rot rotation = temp rot ; rotation . set ( step . dt * group . m angular velocity ) ; rot . mul to out unsafe ( rotation , group . m center , cross ) ; temp . set ( group . m linear velocity ) . mul local ( step . dt ) . add local ( group . m center ) . sub local ( cross ) ; temp xf . p . set ( temp ) ; temp xf . q . set ( rotation ) ; transform . mul to out ( temp xf , group . m transform , group . m transform ) ; final transform velocity transform = temp xf 2 ; velocity transform . p . x = step . inv dt * temp xf . p . x ; velocity transform . p . y = step . inv dt * temp xf . p . y ; velocity transform . q . s = step . inv dt * temp xf . q . s ; velocity transform . q . c = step . inv dt * ( temp xf . q . c - 1 ) ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { transform . mul to out unsafe ( velocity transform , m position buffer . data [ i ] , m velocity buffer . data [ i ] ) ; } } } } <SENTENCE_END/>

(Copy Probability: 13.3%)

<SENTENCE_START> { for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { if ( ( group . m group flags & particle group type . b 2 rigid particle group ) != 0 ) { group . update statistics ( ) ; vec 2 temp = temp vec ; vec 2 cross = temp vec 2 ; rot rotation = temp rot ; rotation . set ( step . dt * group . m angular velocity ) ; rot . mul to out unsafe ( rotation , group . m center , cross ) ; temp . set ( group . m linear velocity ) . mul local ( step . dt ) . add local ( group . m center ) . sub local ( cross ) ; temp xf . p . set ( temp ) ; temp xf . q . set ( rotation ) ; transform . mul to out ( temp xf , group . m transform , group . m transform ) ; final transform velocity transform = temp xf 2 ; velocity transform . p . x = step . inv dt * temp xf . p . x ; velocity transform . p . y = step . inv dt * temp xf . p . y ; velocity transform . q . s = step . inv dt * temp xf . q . s ; velocity transform . q . c = step . inv dt * ( temp xf . q . c - 1 ) ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { transform . mul to out unsafe ( velocity transform , m position buffer . data [ i ] , m velocity buffer . data [ i ] ) ; } } } } <SENTENCE_END/>

%END%

<SENTENCE_START> { for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { if ( ( group . m group flags & particle group type . b 2 rigid particle group ) != 0 ) { group . update statistics ( ) ; vec 2 temp = temp vec ; vec 2 cross = temp vec 2 ; rot rotation = temp rot ; rotation . set ( step . dt * group . m angular velocity ) ; rot . mul to out unsafe ( rotation , group . m center , cross ) ; temp . set ( group . m linear velocity ) . mul local ( step . dt ) . add local ( group . m center ) . sub local ( cross ) ; temp xf . p . set ( temp ) ; temp xf . q . set ( rotation ) ; transform . mul to out ( temp xf , group . m transform , group . m transform ) ; final transform velocity transform = temp xf 2 ; velocity transform . p . x = step . inv dt * temp xf . p . x ; velocity transform . p . y = step . inv dt * temp xf . p . y ; velocity transform . q . s = step . inv dt * temp xf . q . s ; velocity transform . q . c = step . inv dt * ( temp xf . q . c - 1 ) ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { transform . mul to out unsafe ( velocity transform , m position buffer . data [ i ] , m velocity buffer . data [ i ] ) ; } } } } <SENTENCE_END/>

(Copy Probability: 18.6%)

<SENTENCE_START> { for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { if ( ( group . m group flags & particle group type . b 2 rigid particle group ) != 0 ) { group . update statistics ( ) ; vec 2 temp = temp vec ; vec 2 cross = temp vec 2 ; rot rotation = temp rot ; rotation . set ( step . dt * group . m angular velocity ) ; rot . mul to out unsafe ( rotation , group . m center , cross ) ; temp . set ( group . m linear velocity ) . mul local ( step . dt ) . add local ( group . m center ) . sub local ( cross ) ; temp xf . p . set ( temp ) ; temp xf . q . set ( rotation ) ; transform . mul to out ( temp xf , group . m transform , group . m transform ) ; final transform velocity transform = temp xf 2 ; velocity transform . p . x = step . inv dt * temp xf . p . x ; velocity transform . p . y = step . inv dt * temp xf . p . y ; velocity transform . q . s = step . inv dt * temp xf . q . s ; velocity transform . q . c = step . inv dt * ( temp xf . q . c - 1 ) ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { transform . mul to out unsafe ( velocity transform , m position buffer . data [ i ] , m velocity buffer . data [ i ] ) ; } } } } <SENTENCE_END/>
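
solveRigid treats each rigid particle group as one moving body: it builds the incremental transform produced by the group's linear and angular velocity over dt, composes it with the group transform, and converts that motion back into per-particle velocities. The sketch below skips the transform bookkeeping and assigns each particle the rigid-body velocity v = v_group + omega x (p - center), which is what the transform-based code computes to first order in dt; it is a simplification, not a transcription of the body above.

// Sketch: rigid-group velocities from linear velocity plus rotation about the group center.
public final class RigidSketch {
    static void solveRigid(float[] posX, float[] posY, float[] velX, float[] velY,
                           int firstIndex, int lastIndex,
                           float centerX, float centerY,
                           float linVelX, float linVelY, float angularVelocity) {
        for (int i = firstIndex; i < lastIndex; i++) {
            float rx = posX[i] - centerX;
            float ry = posY[i] - centerY;
            // 2D cross product of the scalar omega with r is (-omega * ry, omega * rx).
            velX[i] = linVelX - angularVelocity * ry;
            velY[i] = linVelY + angularVelocity * rx;
        }
    }
}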


Original Name solve,elastic

solve

<SENTENCE_START> { float elastic strength = step . inv dt * m elastic strength ; for ( int k = 0 ; k < m triad count ; k ++ ) { final triad triad = m triad buffer [ k ] ; if ( ( triad . flags & particle type . b 2 elastic particle ) != 0 ) { int a = triad . index a ; int b = triad . index b ; int c = triad . index c ; final vec 2 oa = triad . pa ; final vec 2 ob = triad . pb ; final vec 2 oc = triad . pc ; final vec 2 pa = m position buffer . data [ a ] ; final vec 2 pb = m position buffer . data [ b ] ; final vec 2 pc = m position buffer . data [ c ] ; final float px = 1f / 3 * ( pa . x + pb . x + pc . x ) ; final float py = 1f / 3 * ( pa . y + pb . y + pc . y ) ; float rs = vec 2 . cross ( oa , pa ) + vec 2 . cross ( ob , pb ) + vec 2 . cross ( oc , pc ) ; float rc = vec 2 . dot ( oa , pa ) + vec 2 . dot ( ob , pb ) + vec 2 . dot ( oc , pc ) ; float r 2 = rs * rs + rc * rc ; float inv r = r 2 == 0 ? float . max value : math utils . sqrt ( 1f / r 2 ) ; rs *= inv r ; rc *= inv r ; final float strength = elastic strength * triad . strength ; final float roax = rc * oa . x - rs * oa . y ; final float roay = rs * oa . x + rc * oa . y ; final float robx = rc * ob . x - rs * ob . y ; final float roby = rs * ob . x + rc * ob . y ; final float rocx = rc * oc . x - rs * oc . y ; final float rocy = rs * oc . x + rc * oc . y ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final vec 2 vc = m velocity buffer . data [ c ] ; va . x += strength * ( roax - ( pa . x - px ) ) ; va . y += strength * ( roay - ( pa . y - py ) ) ; vb . x += strength * ( robx - ( pb . x - px ) ) ; vb . y += strength * ( roby - ( pb . y - py ) ) ; vc . x += strength * ( rocx - ( pc . x - px ) ) ; vc . y += strength * ( rocy - ( pc . y - py ) ) ; } } } <SENTENCE_END/>

(Copy Probability: 3.1%)

<SENTENCE_START> { float elastic strength = step . inv dt * m elastic strength ; for ( int k = 0 ; k < m triad count ; k ++ ) { final triad triad = m triad buffer [ k ] ; if ( ( triad . flags & particle type . b 2 elastic particle ) != 0 ) { int a = triad . index a ; int b = triad . index b ; int c = triad . index c ; final vec 2 oa = triad . pa ; final vec 2 ob = triad . pb ; final vec 2 oc = triad . pc ; final vec 2 pa = m position buffer . data [ a ] ; final vec 2 pb = m position buffer . data [ b ] ; final vec 2 pc = m position buffer . data [ c ] ; final float px = 1f / 3 * ( pa . x + pb . x + pc . x ) ; final float py = 1f / 3 * ( pa . y + pb . y + pc . y ) ; float rs = vec 2 . cross ( oa , pa ) + vec 2 . cross ( ob , pb ) + vec 2 . cross ( oc , pc ) ; float rc = vec 2 . dot ( oa , pa ) + vec 2 . dot ( ob , pb ) + vec 2 . dot ( oc , pc ) ; float r 2 = rs * rs + rc * rc ; float inv r = r 2 == 0 ? float . max value : math utils . sqrt ( 1f / r 2 ) ; rs *= inv r ; rc *= inv r ; final float strength = elastic strength * triad . strength ; final float roax = rc * oa . x - rs * oa . y ; final float roay = rs * oa . x + rc * oa . y ; final float robx = rc * ob . x - rs * ob . y ; final float roby = rs * ob . x + rc * ob . y ; final float rocx = rc * oc . x - rs * oc . y ; final float rocy = rs * oc . x + rc * oc . y ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final vec 2 vc = m velocity buffer . data [ c ] ; va . x += strength * ( roax - ( pa . x - px ) ) ; va . y += strength * ( roay - ( pa . y - py ) ) ; vb . x += strength * ( robx - ( pb . x - px ) ) ; vb . y += strength * ( roby - ( pb . y - py ) ) ; vc . x += strength * ( rocx - ( pc . x - px ) ) ; vc . y += strength * ( rocy - ( pc . y - py ) ) ; } } } <SENTENCE_END/>

elastic

<SENTENCE_START> { float elastic strength = step . inv dt * m elastic strength ; for ( int k = 0 ; k < m triad count ; k ++ ) { final triad triad = m triad buffer [ k ] ; if ( ( triad . flags & particle type . b 2 elastic particle ) != 0 ) { int a = triad . index a ; int b = triad . index b ; int c = triad . index c ; final vec 2 oa = triad . pa ; final vec 2 ob = triad . pb ; final vec 2 oc = triad . pc ; final vec 2 pa = m position buffer . data [ a ] ; final vec 2 pb = m position buffer . data [ b ] ; final vec 2 pc = m position buffer . data [ c ] ; final float px = 1f / 3 * ( pa . x + pb . x + pc . x ) ; final float py = 1f / 3 * ( pa . y + pb . y + pc . y ) ; float rs = vec 2 . cross ( oa , pa ) + vec 2 . cross ( ob , pb ) + vec 2 . cross ( oc , pc ) ; float rc = vec 2 . dot ( oa , pa ) + vec 2 . dot ( ob , pb ) + vec 2 . dot ( oc , pc ) ; float r 2 = rs * rs + rc * rc ; float inv r = r 2 == 0 ? float . max value : math utils . sqrt ( 1f / r 2 ) ; rs *= inv r ; rc *= inv r ; final float strength = elastic strength * triad . strength ; final float roax = rc * oa . x - rs * oa . y ; final float roay = rs * oa . x + rc * oa . y ; final float robx = rc * ob . x - rs * ob . y ; final float roby = rs * ob . x + rc * ob . y ; final float rocx = rc * oc . x - rs * oc . y ; final float rocy = rs * oc . x + rc * oc . y ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final vec 2 vc = m velocity buffer . data [ c ] ; va . x += strength * ( roax - ( pa . x - px ) ) ; va . y += strength * ( roay - ( pa . y - py ) ) ; vb . x += strength * ( robx - ( pb . x - px ) ) ; vb . y += strength * ( roby - ( pb . y - py ) ) ; vc . x += strength * ( rocx - ( pc . x - px ) ) ; vc . y += strength * ( rocy - ( pc . y - py ) ) ; } } } <SENTENCE_END/>

(Copy Probability: 4.4%)

<SENTENCE_START> { float elastic strength = step . inv dt * m elastic strength ; for ( int k = 0 ; k < m triad count ; k ++ ) { final triad triad = m triad buffer [ k ] ; if ( ( triad . flags & particle type . b 2 elastic particle ) != 0 ) { int a = triad . index a ; int b = triad . index b ; int c = triad . index c ; final vec 2 oa = triad . pa ; final vec 2 ob = triad . pb ; final vec 2 oc = triad . pc ; final vec 2 pa = m position buffer . data [ a ] ; final vec 2 pb = m position buffer . data [ b ] ; final vec 2 pc = m position buffer . data [ c ] ; final float px = 1f / 3 * ( pa . x + pb . x + pc . x ) ; final float py = 1f / 3 * ( pa . y + pb . y + pc . y ) ; float rs = vec 2 . cross ( oa , pa ) + vec 2 . cross ( ob , pb ) + vec 2 . cross ( oc , pc ) ; float rc = vec 2 . dot ( oa , pa ) + vec 2 . dot ( ob , pb ) + vec 2 . dot ( oc , pc ) ; float r 2 = rs * rs + rc * rc ; float inv r = r 2 == 0 ? float . max value : math utils . sqrt ( 1f / r 2 ) ; rs *= inv r ; rc *= inv r ; final float strength = elastic strength * triad . strength ; final float roax = rc * oa . x - rs * oa . y ; final float roay = rs * oa . x + rc * oa . y ; final float robx = rc * ob . x - rs * ob . y ; final float roby = rs * ob . x + rc * ob . y ; final float rocx = rc * oc . x - rs * oc . y ; final float rocy = rs * oc . x + rc * oc . y ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final vec 2 vc = m velocity buffer . data [ c ] ; va . x += strength * ( roax - ( pa . x - px ) ) ; va . y += strength * ( roay - ( pa . y - py ) ) ; vb . x += strength * ( robx - ( pb . x - px ) ) ; vb . y += strength * ( roby - ( pb . y - py ) ) ; vc . x += strength * ( rocx - ( pc . x - px ) ) ; vc . y += strength * ( rocy - ( pc . y - py ) ) ; } } } <SENTENCE_END/>

%END%

<SENTENCE_START> { float elastic strength = step . inv dt * m elastic strength ; for ( int k = 0 ; k < m triad count ; k ++ ) { final triad triad = m triad buffer [ k ] ; if ( ( triad . flags & particle type . b 2 elastic particle ) != 0 ) { int a = triad . index a ; int b = triad . index b ; int c = triad . index c ; final vec 2 oa = triad . pa ; final vec 2 ob = triad . pb ; final vec 2 oc = triad . pc ; final vec 2 pa = m position buffer . data [ a ] ; final vec 2 pb = m position buffer . data [ b ] ; final vec 2 pc = m position buffer . data [ c ] ; final float px = 1f / 3 * ( pa . x + pb . x + pc . x ) ; final float py = 1f / 3 * ( pa . y + pb . y + pc . y ) ; float rs = vec 2 . cross ( oa , pa ) + vec 2 . cross ( ob , pb ) + vec 2 . cross ( oc , pc ) ; float rc = vec 2 . dot ( oa , pa ) + vec 2 . dot ( ob , pb ) + vec 2 . dot ( oc , pc ) ; float r 2 = rs * rs + rc * rc ; float inv r = r 2 == 0 ? float . max value : math utils . sqrt ( 1f / r 2 ) ; rs *= inv r ; rc *= inv r ; final float strength = elastic strength * triad . strength ; final float roax = rc * oa . x - rs * oa . y ; final float roay = rs * oa . x + rc * oa . y ; final float robx = rc * ob . x - rs * ob . y ; final float roby = rs * ob . x + rc * ob . y ; final float rocx = rc * oc . x - rs * oc . y ; final float rocy = rs * oc . x + rc * oc . y ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final vec 2 vc = m velocity buffer . data [ c ] ; va . x += strength * ( roax - ( pa . x - px ) ) ; va . y += strength * ( roay - ( pa . y - py ) ) ; vb . x += strength * ( robx - ( pb . x - px ) ) ; vb . y += strength * ( roby - ( pb . y - py ) ) ; vc . x += strength * ( rocx - ( pc . x - px ) ) ; vc . y += strength * ( rocy - ( pc . y - py ) ) ; } } } <SENTENCE_END/>

(Copy Probability: 3.2%)

<SENTENCE_START> { float elastic strength = step . inv dt * m elastic strength ; for ( int k = 0 ; k < m triad count ; k ++ ) { final triad triad = m triad buffer [ k ] ; if ( ( triad . flags & particle type . b 2 elastic particle ) != 0 ) { int a = triad . index a ; int b = triad . index b ; int c = triad . index c ; final vec 2 oa = triad . pa ; final vec 2 ob = triad . pb ; final vec 2 oc = triad . pc ; final vec 2 pa = m position buffer . data [ a ] ; final vec 2 pb = m position buffer . data [ b ] ; final vec 2 pc = m position buffer . data [ c ] ; final float px = 1f / 3 * ( pa . x + pb . x + pc . x ) ; final float py = 1f / 3 * ( pa . y + pb . y + pc . y ) ; float rs = vec 2 . cross ( oa , pa ) + vec 2 . cross ( ob , pb ) + vec 2 . cross ( oc , pc ) ; float rc = vec 2 . dot ( oa , pa ) + vec 2 . dot ( ob , pb ) + vec 2 . dot ( oc , pc ) ; float r 2 = rs * rs + rc * rc ; float inv r = r 2 == 0 ? float . max value : math utils . sqrt ( 1f / r 2 ) ; rs *= inv r ; rc *= inv r ; final float strength = elastic strength * triad . strength ; final float roax = rc * oa . x - rs * oa . y ; final float roay = rs * oa . x + rc * oa . y ; final float robx = rc * ob . x - rs * ob . y ; final float roby = rs * ob . x + rc * ob . y ; final float rocx = rc * oc . x - rs * oc . y ; final float rocy = rs * oc . x + rc * oc . y ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final vec 2 vc = m velocity buffer . data [ c ] ; va . x += strength * ( roax - ( pa . x - px ) ) ; va . y += strength * ( roay - ( pa . y - py ) ) ; vb . x += strength * ( robx - ( pb . x - px ) ) ; vb . y += strength * ( roby - ( pb . y - py ) ) ; vc . x += strength * ( rocx - ( pc . x - px ) ) ; vc . y += strength * ( rocy - ( pc . y - py ) ) ; } } } <SENTENCE_END/>
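
solveElastic matches each triad's current positions against its stored reference offsets: the accumulated cross and dot products (rs, rc) estimate the best-fit rotation, which is normalized and used to steer each particle's velocity toward the rotated reference offset measured from the triad centroid. A self-contained sketch for a single triad; the strength parameter corresponds to inv_dt * m_elasticStrength * triad.strength above.

// Sketch: one elastic triad, estimating the rotation from reference offsets to current positions.
public final class ElasticTriadSketch {
    static void solveTriad(float[] ox, float[] oy,   // reference offsets oa, ob, oc
                           float[] px, float[] py,   // current positions pa, pb, pc
                           float[] vx, float[] vy,   // velocities to adjust
                           float strength) {
        // Current centroid of the triad.
        float cx = (px[0] + px[1] + px[2]) / 3f;
        float cy = (py[0] + py[1] + py[2]) / 3f;
        // Accumulate sine-like (cross) and cosine-like (dot) terms of the rotation.
        float rs = 0f, rc = 0f;
        for (int i = 0; i < 3; i++) {
            rs += ox[i] * py[i] - oy[i] * px[i]; // cross(o, p)
            rc += ox[i] * px[i] + oy[i] * py[i]; // dot(o, p)
        }
        float r2 = rs * rs + rc * rc;
        float invR = r2 == 0 ? Float.MAX_VALUE : (float) Math.sqrt(1f / r2);
        rs *= invR;
        rc *= invR;
        // Rotate each reference offset and pull the velocity toward the target offset.
        for (int i = 0; i < 3; i++) {
            float rox = rc * ox[i] - rs * oy[i];
            float roy = rs * ox[i] + rc * oy[i];
            vx[i] += strength * (rox - (px[i] - cx));
            vy[i] += strength * (roy - (py[i] - cy));
        }
    }
}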


Original Name solve,spring

solve

<SENTENCE_START> { float spring strength = step . inv dt * m spring strength ; for ( int k = 0 ; k < m pair count ; k ++ ) { final pair pair = m pair buffer [ k ] ; if ( ( pair . flags & particle type . b 2 spring particle ) != 0 ) { int a = pair . index a ; int b = pair . index b ; final vec 2 pa = m position buffer . data [ a ] ; final vec 2 pb = m position buffer . data [ b ] ; final float dx = pb . x - pa . x ; final float dy = pb . y - pa . y ; float r 0 = pair . distance ; float r 1 = math utils . sqrt ( dx * dx + dy * dy ) ; if ( r 1 == 0 ) r 1 = float . max value ; float strength = spring strength * pair . strength ; final float fx = strength * ( r 0 - r 1 ) / r 1 * dx ; final float fy = strength * ( r 0 - r 1 ) / r 1 * dy ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>

(Copy Probability: 3.0%)

<SENTENCE_START> { float spring strength = step . inv dt * m spring strength ; for ( int k = 0 ; k < m pair count ; k ++ ) { final pair pair = m pair buffer [ k ] ; if ( ( pair . flags & particle type . b 2 spring particle ) != 0 ) { int a = pair . index a ; int b = pair . index b ; final vec 2 pa = m position buffer . data [ a ] ; final vec 2 pb = m position buffer . data [ b ] ; final float dx = pb . x - pa . x ; final float dy = pb . y - pa . y ; float r 0 = pair . distance ; float r 1 = math utils . sqrt ( dx * dx + dy * dy ) ; if ( r 1 == 0 ) r 1 = float . max value ; float strength = spring strength * pair . strength ; final float fx = strength * ( r 0 - r 1 ) / r 1 * dx ; final float fy = strength * ( r 0 - r 1 ) / r 1 * dy ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>

spring

<SENTENCE_START> { float spring strength = step . inv dt * m spring strength ; for ( int k = 0 ; k < m pair count ; k ++ ) { final pair pair = m pair buffer [ k ] ; if ( ( pair . flags & particle type . b 2 spring particle ) != 0 ) { int a = pair . index a ; int b = pair . index b ; final vec 2 pa = m position buffer . data [ a ] ; final vec 2 pb = m position buffer . data [ b ] ; final float dx = pb . x - pa . x ; final float dy = pb . y - pa . y ; float r 0 = pair . distance ; float r 1 = math utils . sqrt ( dx * dx + dy * dy ) ; if ( r 1 == 0 ) r 1 = float . max value ; float strength = spring strength * pair . strength ; final float fx = strength * ( r 0 - r 1 ) / r 1 * dx ; final float fy = strength * ( r 0 - r 1 ) / r 1 * dy ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>

(Copy Probability: 5.0%)

<SENTENCE_START> { float spring strength = step . inv dt * m spring strength ; for ( int k = 0 ; k < m pair count ; k ++ ) { final pair pair = m pair buffer [ k ] ; if ( ( pair . flags & particle type . b 2 spring particle ) != 0 ) { int a = pair . index a ; int b = pair . index b ; final vec 2 pa = m position buffer . data [ a ] ; final vec 2 pb = m position buffer . data [ b ] ; final float dx = pb . x - pa . x ; final float dy = pb . y - pa . y ; float r 0 = pair . distance ; float r 1 = math utils . sqrt ( dx * dx + dy * dy ) ; if ( r 1 == 0 ) r 1 = float . max value ; float strength = spring strength * pair . strength ; final float fx = strength * ( r 0 - r 1 ) / r 1 * dx ; final float fy = strength * ( r 0 - r 1 ) / r 1 * dy ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>

%END%

<SENTENCE_START> { float spring strength = step . inv dt * m spring strength ; for ( int k = 0 ; k < m pair count ; k ++ ) { final pair pair = m pair buffer [ k ] ; if ( ( pair . flags & particle type . b 2 spring particle ) != 0 ) { int a = pair . index a ; int b = pair . index b ; final vec 2 pa = m position buffer . data [ a ] ; final vec 2 pb = m position buffer . data [ b ] ; final float dx = pb . x - pa . x ; final float dy = pb . y - pa . y ; float r 0 = pair . distance ; float r 1 = math utils . sqrt ( dx * dx + dy * dy ) ; if ( r 1 == 0 ) r 1 = float . max value ; float strength = spring strength * pair . strength ; final float fx = strength * ( r 0 - r 1 ) / r 1 * dx ; final float fy = strength * ( r 0 - r 1 ) / r 1 * dy ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>

(Copy Probability: 5.6%)

<SENTENCE_START> { float spring strength = step . inv dt * m spring strength ; for ( int k = 0 ; k < m pair count ; k ++ ) { final pair pair = m pair buffer [ k ] ; if ( ( pair . flags & particle type . b 2 spring particle ) != 0 ) { int a = pair . index a ; int b = pair . index b ; final vec 2 pa = m position buffer . data [ a ] ; final vec 2 pb = m position buffer . data [ b ] ; final float dx = pb . x - pa . x ; final float dy = pb . y - pa . y ; float r 0 = pair . distance ; float r 1 = math utils . sqrt ( dx * dx + dy * dy ) ; if ( r 1 == 0 ) r 1 = float . max value ; float strength = spring strength * pair . strength ; final float fx = strength * ( r 0 - r 1 ) / r 1 * dx ; final float fy = strength * ( r 0 - r 1 ) / r 1 * dy ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>
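
solveSpring treats each pair as a spring with rest length r0: the velocity change on both particles is proportional to (r0 - r1)/r1 along the pair direction, with strength inv_dt * m_springStrength * pair.strength. A self-contained sketch for one pair:

// Sketch: one spring pair, pushing or pulling the two particles toward the rest length.
public final class SpringPairSketch {
    static void solvePair(float[] posX, float[] posY, float[] velX, float[] velY,
                          int a, int b, float restLength, float strength) {
        float dx = posX[b] - posX[a];
        float dy = posY[b] - posY[a];
        float r1 = (float) Math.sqrt(dx * dx + dy * dy);
        if (r1 == 0) {
            r1 = Float.MAX_VALUE; // avoid dividing by zero, as in the body above
        }
        float fx = strength * (restLength - r1) / r1 * dx;
        float fy = strength * (restLength - r1) / r1 * dy;
        velX[a] -= fx; velY[a] -= fy;
        velX[b] += fx; velY[b] += fy;
    }

    public static void main(String[] args) {
        float[] px = {0f, 2f}, py = {0f, 0f};
        float[] vx = {0f, 0f}, vy = {0f, 0f};
        solvePair(px, py, vx, vy, 0, 1, 1f, 0.5f); // stretched pair pulls inward
        System.out.println(vx[0] + " " + vx[1]);   // 0.5 -0.5
    }
}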


Original Name solve,tensile

solve

<SENTENCE_START> { m accumulation 2 buffer = request particle buffer ( vec 2 . class , m accumulation 2 buffer ) ; for ( int i = 0 ; i < m count ; i ++ ) { m accumulation buffer [ i ] = 0 ; m accumulation 2 buffer [ i ] . set zero ( ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 tensile particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; final vec 2 a 2 a = m accumulation 2 buffer [ a ] ; final vec 2 a 2 b = m accumulation 2 buffer [ b ] ; final float inter = ( 1 - w ) * w ; a 2 a . x -= inter * n . x ; a 2 a . y -= inter * n . y ; a 2 b . x += inter * n . x ; a 2 b . y += inter * n . y ; } } float strength a = m surface tension strength a * get critical velocity ( step ) ; float strength b = m surface tension strength b * get critical velocity ( step ) ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 tensile particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; final vec 2 a 2 a = m accumulation 2 buffer [ a ] ; final vec 2 a 2 b = m accumulation 2 buffer [ b ] ; float h = m accumulation buffer [ a ] + m accumulation buffer [ b ] ; final float sx = a 2 b . x - a 2 a . x ; final float sy = a 2 b . y - a 2 a . y ; float fn = ( strength a * ( h - 2 ) + strength b * ( sx * n . x + sy * n . y ) ) * w ; final float fx = fn * n . x ; final float fy = fn * n . y ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { m accumulation 2 buffer = request particle buffer ( vec 2 . class , m accumulation 2 buffer ) ; for ( int i = 0 ; i < m count ; i ++ ) { m accumulation buffer [ i ] = 0 ; m accumulation 2 buffer [ i ] . set zero ( ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 tensile particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; final vec 2 a 2 a = m accumulation 2 buffer [ a ] ; final vec 2 a 2 b = m accumulation 2 buffer [ b ] ; final float inter = ( 1 - w ) * w ; a 2 a . x -= inter * n . x ; a 2 a . y -= inter * n . y ; a 2 b . x += inter * n . x ; a 2 b . y += inter * n . y ; } } float strength a = m surface tension strength a * get critical velocity ( step ) ; float strength b = m surface tension strength b * get critical velocity ( step ) ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 tensile particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; final vec 2 a 2 a = m accumulation 2 buffer [ a ] ; final vec 2 a 2 b = m accumulation 2 buffer [ b ] ; float h = m accumulation buffer [ a ] + m accumulation buffer [ b ] ; final float sx = a 2 b . x - a 2 a . x ; final float sy = a 2 b . y - a 2 a . y ; float fn = ( strength a * ( h - 2 ) + strength b * ( sx * n . x + sy * n . y ) ) * w ; final float fx = fn * n . x ; final float fy = fn * n . y ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>

tensile

<SENTENCE_START> { m accumulation 2 buffer = request particle buffer ( vec 2 . class , m accumulation 2 buffer ) ; for ( int i = 0 ; i < m count ; i ++ ) { m accumulation buffer [ i ] = 0 ; m accumulation 2 buffer [ i ] . set zero ( ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 tensile particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; final vec 2 a 2 a = m accumulation 2 buffer [ a ] ; final vec 2 a 2 b = m accumulation 2 buffer [ b ] ; final float inter = ( 1 - w ) * w ; a 2 a . x -= inter * n . x ; a 2 a . y -= inter * n . y ; a 2 b . x += inter * n . x ; a 2 b . y += inter * n . y ; } } float strength a = m surface tension strength a * get critical velocity ( step ) ; float strength b = m surface tension strength b * get critical velocity ( step ) ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 tensile particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; final vec 2 a 2 a = m accumulation 2 buffer [ a ] ; final vec 2 a 2 b = m accumulation 2 buffer [ b ] ; float h = m accumulation buffer [ a ] + m accumulation buffer [ b ] ; final float sx = a 2 b . x - a 2 a . x ; final float sy = a 2 b . y - a 2 a . y ; float fn = ( strength a * ( h - 2 ) + strength b * ( sx * n . x + sy * n . y ) ) * w ; final float fx = fn * n . x ; final float fy = fn * n . y ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>

(Copy Probability: 5.8%)

<SENTENCE_START> { m accumulation 2 buffer = request particle buffer ( vec 2 . class , m accumulation 2 buffer ) ; for ( int i = 0 ; i < m count ; i ++ ) { m accumulation buffer [ i ] = 0 ; m accumulation 2 buffer [ i ] . set zero ( ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 tensile particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; final vec 2 a 2 a = m accumulation 2 buffer [ a ] ; final vec 2 a 2 b = m accumulation 2 buffer [ b ] ; final float inter = ( 1 - w ) * w ; a 2 a . x -= inter * n . x ; a 2 a . y -= inter * n . y ; a 2 b . x += inter * n . x ; a 2 b . y += inter * n . y ; } } float strength a = m surface tension strength a * get critical velocity ( step ) ; float strength b = m surface tension strength b * get critical velocity ( step ) ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 tensile particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; final vec 2 a 2 a = m accumulation 2 buffer [ a ] ; final vec 2 a 2 b = m accumulation 2 buffer [ b ] ; float h = m accumulation buffer [ a ] + m accumulation buffer [ b ] ; final float sx = a 2 b . x - a 2 a . x ; final float sy = a 2 b . y - a 2 a . y ; float fn = ( strength a * ( h - 2 ) + strength b * ( sx * n . x + sy * n . y ) ) * w ; final float fx = fn * n . x ; final float fy = fn * n . y ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>

%END%

<SENTENCE_START> { m accumulation 2 buffer = request particle buffer ( vec 2 . class , m accumulation 2 buffer ) ; for ( int i = 0 ; i < m count ; i ++ ) { m accumulation buffer [ i ] = 0 ; m accumulation 2 buffer [ i ] . set zero ( ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 tensile particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; final vec 2 a 2 a = m accumulation 2 buffer [ a ] ; final vec 2 a 2 b = m accumulation 2 buffer [ b ] ; final float inter = ( 1 - w ) * w ; a 2 a . x -= inter * n . x ; a 2 a . y -= inter * n . y ; a 2 b . x += inter * n . x ; a 2 b . y += inter * n . y ; } } float strength a = m surface tension strength a * get critical velocity ( step ) ; float strength b = m surface tension strength b * get critical velocity ( step ) ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 tensile particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; final vec 2 a 2 a = m accumulation 2 buffer [ a ] ; final vec 2 a 2 b = m accumulation 2 buffer [ b ] ; float h = m accumulation buffer [ a ] + m accumulation buffer [ b ] ; final float sx = a 2 b . x - a 2 a . x ; final float sy = a 2 b . y - a 2 a . y ; float fn = ( strength a * ( h - 2 ) + strength b * ( sx * n . x + sy * n . y ) ) * w ; final float fx = fn * n . x ; final float fy = fn * n . y ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>

(Copy Probability: 6.2%)

<SENTENCE_START> { m accumulation 2 buffer = request particle buffer ( vec 2 . class , m accumulation 2 buffer ) ; for ( int i = 0 ; i < m count ; i ++ ) { m accumulation buffer [ i ] = 0 ; m accumulation 2 buffer [ i ] . set zero ( ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 tensile particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; m accumulation buffer [ a ] += w ; m accumulation buffer [ b ] += w ; final vec 2 a 2 a = m accumulation 2 buffer [ a ] ; final vec 2 a 2 b = m accumulation 2 buffer [ b ] ; final float inter = ( 1 - w ) * w ; a 2 a . x -= inter * n . x ; a 2 a . y -= inter * n . y ; a 2 b . x += inter * n . x ; a 2 b . y += inter * n . y ; } } float strength a = m surface tension strength a * get critical velocity ( step ) ; float strength b = m surface tension strength b * get critical velocity ( step ) ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 tensile particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; vec 2 n = contact . normal ; final vec 2 a 2 a = m accumulation 2 buffer [ a ] ; final vec 2 a 2 b = m accumulation 2 buffer [ b ] ; float h = m accumulation buffer [ a ] + m accumulation buffer [ b ] ; final float sx = a 2 b . x - a 2 a . x ; final float sy = a 2 b . y - a 2 a . y ; float fn = ( strength a * ( h - 2 ) + strength b * ( sx * n . x + sy * n . y ) ) * w ; final float fx = fn * n . x ; final float fy = fn * n . y ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>
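
A minimal sketch of the two-pass surface-tension step the tokenized body above appears to implement: the first pass accumulates, per particle, the total contact weight and a weighted normal; the second pass applies an impulse whose magnitude mixes a pressure-like term (h - 2) with a normal-alignment term. All names below are illustrative assumptions; flag filtering is omitted.

// Hedged sketch of the tensile pass over particle-particle contacts.
final class TensilePassSketch {
    static void solveTensile(float[] velX, float[] velY,
                             int[] contactA, int[] contactB,
                             float[] weight, float[] normalX, float[] normalY,
                             float tensionA, float tensionB, float criticalVelocity) {
        int n = velX.length;
        float[] accumW = new float[n];   // per-particle sum of contact weights
        float[] accumX = new float[n];   // per-particle weighted-normal accumulator
        float[] accumY = new float[n];

        for (int k = 0; k < contactA.length; k++) {
            int a = contactA[k], b = contactB[k];
            float w = weight[k];
            float inter = (1 - w) * w;
            accumW[a] += w;                   accumW[b] += w;
            accumX[a] -= inter * normalX[k];  accumY[a] -= inter * normalY[k];
            accumX[b] += inter * normalX[k];  accumY[b] += inter * normalY[k];
        }

        float strengthA = tensionA * criticalVelocity;
        float strengthB = tensionB * criticalVelocity;
        for (int k = 0; k < contactA.length; k++) {
            int a = contactA[k], b = contactB[k];
            float w = weight[k], nx = normalX[k], ny = normalY[k];
            float h = accumW[a] + accumW[b];          // combined neighbourhood weight
            float sx = accumX[b] - accumX[a];
            float sy = accumY[b] - accumY[a];
            float fn = (strengthA * (h - 2) + strengthB * (sx * nx + sy * ny)) * w;
            float fx = fn * nx, fy = fn * ny;
            velX[a] -= fx; velY[a] -= fy;
            velX[b] += fx; velY[b] += fy;
        }
    }
}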


Original Name solve,viscous

solve

<SENTENCE_START> { float viscous strength = m viscous strength ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; if ( ( m flags buffer . data [ a ] & particle type . b 2 viscous particle ) != 0 ) { body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 p = m position buffer . data [ a ] ; final vec 2 va = m velocity buffer . data [ a ] ; final float temp x = p . x - b . m sweep . c . x ; final float temp y = p . y - b . m sweep . c . y ; final float vx = - b . m angular velocity * temp y + b . m linear velocity . x - va . x ; final float vy = b . m angular velocity * temp x + b . m linear velocity . y - va . y ; final vec 2 f = temp vec ; final float p inv mass = get particle inv mass ( ) ; f . x = viscous strength * m * w * vx ; f . y = viscous strength * m * w * vy ; va . x += p inv mass * f . x ; va . y += p inv mass * f . y ; f . x = - f . x ; f . y = - f . y ; b . apply linear impulse ( f , p , true ) ; } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 viscous particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float vx = vb . x - va . x ; final float vy = vb . y - va . y ; final float fx = viscous strength * w * vx ; final float fy = viscous strength * w * vy ; va . x += fx ; va . y += fy ; vb . x -= fx ; vb . y -= fy ; } } } <SENTENCE_END/>

(Copy Probability: 3.3%)

<SENTENCE_START> { float viscous strength = m viscous strength ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; if ( ( m flags buffer . data [ a ] & particle type . b 2 viscous particle ) != 0 ) { body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 p = m position buffer . data [ a ] ; final vec 2 va = m velocity buffer . data [ a ] ; final float temp x = p . x - b . m sweep . c . x ; final float temp y = p . y - b . m sweep . c . y ; final float vx = - b . m angular velocity * temp y + b . m linear velocity . x - va . x ; final float vy = b . m angular velocity * temp x + b . m linear velocity . y - va . y ; final vec 2 f = temp vec ; final float p inv mass = get particle inv mass ( ) ; f . x = viscous strength * m * w * vx ; f . y = viscous strength * m * w * vy ; va . x += p inv mass * f . x ; va . y += p inv mass * f . y ; f . x = - f . x ; f . y = - f . y ; b . apply linear impulse ( f , p , true ) ; } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 viscous particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float vx = vb . x - va . x ; final float vy = vb . y - va . y ; final float fx = viscous strength * w * vx ; final float fy = viscous strength * w * vy ; va . x += fx ; va . y += fy ; vb . x -= fx ; vb . y -= fy ; } } } <SENTENCE_END/>

viscous

<SENTENCE_START> { float viscous strength = m viscous strength ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; if ( ( m flags buffer . data [ a ] & particle type . b 2 viscous particle ) != 0 ) { body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 p = m position buffer . data [ a ] ; final vec 2 va = m velocity buffer . data [ a ] ; final float temp x = p . x - b . m sweep . c . x ; final float temp y = p . y - b . m sweep . c . y ; final float vx = - b . m angular velocity * temp y + b . m linear velocity . x - va . x ; final float vy = b . m angular velocity * temp x + b . m linear velocity . y - va . y ; final vec 2 f = temp vec ; final float p inv mass = get particle inv mass ( ) ; f . x = viscous strength * m * w * vx ; f . y = viscous strength * m * w * vy ; va . x += p inv mass * f . x ; va . y += p inv mass * f . y ; f . x = - f . x ; f . y = - f . y ; b . apply linear impulse ( f , p , true ) ; } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 viscous particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float vx = vb . x - va . x ; final float vy = vb . y - va . y ; final float fx = viscous strength * w * vx ; final float fy = viscous strength * w * vy ; va . x += fx ; va . y += fy ; vb . x -= fx ; vb . y -= fy ; } } } <SENTENCE_END/>

(Copy Probability: 8.2%)

<SENTENCE_START> { float viscous strength = m viscous strength ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; if ( ( m flags buffer . data [ a ] & particle type . b 2 viscous particle ) != 0 ) { body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 p = m position buffer . data [ a ] ; final vec 2 va = m velocity buffer . data [ a ] ; final float temp x = p . x - b . m sweep . c . x ; final float temp y = p . y - b . m sweep . c . y ; final float vx = - b . m angular velocity * temp y + b . m linear velocity . x - va . x ; final float vy = b . m angular velocity * temp x + b . m linear velocity . y - va . y ; final vec 2 f = temp vec ; final float p inv mass = get particle inv mass ( ) ; f . x = viscous strength * m * w * vx ; f . y = viscous strength * m * w * vy ; va . x += p inv mass * f . x ; va . y += p inv mass * f . y ; f . x = - f . x ; f . y = - f . y ; b . apply linear impulse ( f , p , true ) ; } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 viscous particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float vx = vb . x - va . x ; final float vy = vb . y - va . y ; final float fx = viscous strength * w * vx ; final float fy = viscous strength * w * vy ; va . x += fx ; va . y += fy ; vb . x -= fx ; vb . y -= fy ; } } } <SENTENCE_END/>

%END%

<SENTENCE_START> { float viscous strength = m viscous strength ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; if ( ( m flags buffer . data [ a ] & particle type . b 2 viscous particle ) != 0 ) { body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 p = m position buffer . data [ a ] ; final vec 2 va = m velocity buffer . data [ a ] ; final float temp x = p . x - b . m sweep . c . x ; final float temp y = p . y - b . m sweep . c . y ; final float vx = - b . m angular velocity * temp y + b . m linear velocity . x - va . x ; final float vy = b . m angular velocity * temp x + b . m linear velocity . y - va . y ; final vec 2 f = temp vec ; final float p inv mass = get particle inv mass ( ) ; f . x = viscous strength * m * w * vx ; f . y = viscous strength * m * w * vy ; va . x += p inv mass * f . x ; va . y += p inv mass * f . y ; f . x = - f . x ; f . y = - f . y ; b . apply linear impulse ( f , p , true ) ; } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 viscous particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float vx = vb . x - va . x ; final float vy = vb . y - va . y ; final float fx = viscous strength * w * vx ; final float fy = viscous strength * w * vy ; va . x += fx ; va . y += fy ; vb . x -= fx ; vb . y -= fy ; } } } <SENTENCE_END/>

(Copy Probability: 9.2%)

<SENTENCE_START> { float viscous strength = m viscous strength ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; if ( ( m flags buffer . data [ a ] & particle type . b 2 viscous particle ) != 0 ) { body b = contact . body ; float w = contact . weight ; float m = contact . mass ; vec 2 p = m position buffer . data [ a ] ; final vec 2 va = m velocity buffer . data [ a ] ; final float temp x = p . x - b . m sweep . c . x ; final float temp y = p . y - b . m sweep . c . y ; final float vx = - b . m angular velocity * temp y + b . m linear velocity . x - va . x ; final float vy = b . m angular velocity * temp x + b . m linear velocity . y - va . y ; final vec 2 f = temp vec ; final float p inv mass = get particle inv mass ( ) ; f . x = viscous strength * m * w * vx ; f . y = viscous strength * m * w * vy ; va . x += p inv mass * f . x ; va . y += p inv mass * f . y ; f . x = - f . x ; f . y = - f . y ; b . apply linear impulse ( f , p , true ) ; } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 viscous particle ) != 0 ) { int a = contact . index a ; int b = contact . index b ; float w = contact . weight ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float vx = vb . x - va . x ; final float vy = vb . y - va . y ; final float fx = viscous strength * w * vx ; final float fy = viscous strength * w * vy ; va . x += fx ; va . y += fy ; vb . x -= fx ; vb . y -= fy ; } } } <SENTENCE_END/>
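
A minimal sketch of the particle-particle half of the viscosity pass above: each contact damps the relative velocity of its two particles in proportion to the contact weight. The body-contact half of the tokenized body works the same way against the rigid body's velocity at the contact point (with an opposite impulse applied back to the body) and is omitted here. Names are illustrative assumptions.

// Hedged sketch of viscous damping over particle-particle contacts only.
final class ViscousPassSketch {
    static void solveViscous(float[] velX, float[] velY,
                             int[] contactA, int[] contactB, float[] weight,
                             float viscousStrength) {
        for (int k = 0; k < contactA.length; k++) {
            int a = contactA[k], b = contactB[k];
            float w = weight[k];
            float dvx = velX[b] - velX[a];         // relative velocity of the pair
            float dvy = velY[b] - velY[a];
            float fx = viscousStrength * w * dvx;  // damping impulse
            float fy = viscousStrength * w * dvy;
            velX[a] += fx; velY[a] += fy;          // drag both velocities together
            velX[b] -= fx; velY[b] -= fy;
        }
    }
}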


Original Name solve,powder

solve

<SENTENCE_START> { float powder strength = m powder strength * get critical velocity ( step ) ; float min weight = 1.0f - settings . particle stride ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; if ( ( m flags buffer . data [ a ] & particle type . b 2 powder particle ) != 0 ) { float w = contact . weight ; if ( w > min weight ) { body b = contact . body ; float m = contact . mass ; vec 2 p = m position buffer . data [ a ] ; vec 2 n = contact . normal ; final vec 2 f = temp vec ; final vec 2 va = m velocity buffer . data [ a ] ; final float inter = powder strength * m * ( w - min weight ) ; final float p inv mass = get particle inv mass ( ) ; f . x = inter * n . x ; f . y = inter * n . y ; va . x -= p inv mass * f . x ; va . y -= p inv mass * f . y ; b . apply linear impulse ( f , p , true ) ; } } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 powder particle ) != 0 ) { float w = contact . weight ; if ( w > min weight ) { int a = contact . index a ; int b = contact . index b ; vec 2 n = contact . normal ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float inter = powder strength * ( w - min weight ) ; final float fx = inter * n . x ; final float fy = inter * n . y ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } } <SENTENCE_END/>

(Copy Probability: 3.4%)

<SENTENCE_START> { float powder strength = m powder strength * get critical velocity ( step ) ; float min weight = 1.0f - settings . particle stride ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; if ( ( m flags buffer . data [ a ] & particle type . b 2 powder particle ) != 0 ) { float w = contact . weight ; if ( w > min weight ) { body b = contact . body ; float m = contact . mass ; vec 2 p = m position buffer . data [ a ] ; vec 2 n = contact . normal ; final vec 2 f = temp vec ; final vec 2 va = m velocity buffer . data [ a ] ; final float inter = powder strength * m * ( w - min weight ) ; final float p inv mass = get particle inv mass ( ) ; f . x = inter * n . x ; f . y = inter * n . y ; va . x -= p inv mass * f . x ; va . y -= p inv mass * f . y ; b . apply linear impulse ( f , p , true ) ; } } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 powder particle ) != 0 ) { float w = contact . weight ; if ( w > min weight ) { int a = contact . index a ; int b = contact . index b ; vec 2 n = contact . normal ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float inter = powder strength * ( w - min weight ) ; final float fx = inter * n . x ; final float fy = inter * n . y ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } } <SENTENCE_END/>

powder

<SENTENCE_START> { float powder strength = m powder strength * get critical velocity ( step ) ; float min weight = 1.0f - settings . particle stride ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; if ( ( m flags buffer . data [ a ] & particle type . b 2 powder particle ) != 0 ) { float w = contact . weight ; if ( w > min weight ) { body b = contact . body ; float m = contact . mass ; vec 2 p = m position buffer . data [ a ] ; vec 2 n = contact . normal ; final vec 2 f = temp vec ; final vec 2 va = m velocity buffer . data [ a ] ; final float inter = powder strength * m * ( w - min weight ) ; final float p inv mass = get particle inv mass ( ) ; f . x = inter * n . x ; f . y = inter * n . y ; va . x -= p inv mass * f . x ; va . y -= p inv mass * f . y ; b . apply linear impulse ( f , p , true ) ; } } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 powder particle ) != 0 ) { float w = contact . weight ; if ( w > min weight ) { int a = contact . index a ; int b = contact . index b ; vec 2 n = contact . normal ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float inter = powder strength * ( w - min weight ) ; final float fx = inter * n . x ; final float fy = inter * n . y ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } } <SENTENCE_END/>

(Copy Probability: 9.5%)

<SENTENCE_START> { float powder strength = m powder strength * get critical velocity ( step ) ; float min weight = 1.0f - settings . particle stride ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; if ( ( m flags buffer . data [ a ] & particle type . b 2 powder particle ) != 0 ) { float w = contact . weight ; if ( w > min weight ) { body b = contact . body ; float m = contact . mass ; vec 2 p = m position buffer . data [ a ] ; vec 2 n = contact . normal ; final vec 2 f = temp vec ; final vec 2 va = m velocity buffer . data [ a ] ; final float inter = powder strength * m * ( w - min weight ) ; final float p inv mass = get particle inv mass ( ) ; f . x = inter * n . x ; f . y = inter * n . y ; va . x -= p inv mass * f . x ; va . y -= p inv mass * f . y ; b . apply linear impulse ( f , p , true ) ; } } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 powder particle ) != 0 ) { float w = contact . weight ; if ( w > min weight ) { int a = contact . index a ; int b = contact . index b ; vec 2 n = contact . normal ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float inter = powder strength * ( w - min weight ) ; final float fx = inter * n . x ; final float fy = inter * n . y ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } } <SENTENCE_END/>

%END%

<SENTENCE_START> { float powder strength = m powder strength * get critical velocity ( step ) ; float min weight = 1.0f - settings . particle stride ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; if ( ( m flags buffer . data [ a ] & particle type . b 2 powder particle ) != 0 ) { float w = contact . weight ; if ( w > min weight ) { body b = contact . body ; float m = contact . mass ; vec 2 p = m position buffer . data [ a ] ; vec 2 n = contact . normal ; final vec 2 f = temp vec ; final vec 2 va = m velocity buffer . data [ a ] ; final float inter = powder strength * m * ( w - min weight ) ; final float p inv mass = get particle inv mass ( ) ; f . x = inter * n . x ; f . y = inter * n . y ; va . x -= p inv mass * f . x ; va . y -= p inv mass * f . y ; b . apply linear impulse ( f , p , true ) ; } } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 powder particle ) != 0 ) { float w = contact . weight ; if ( w > min weight ) { int a = contact . index a ; int b = contact . index b ; vec 2 n = contact . normal ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float inter = powder strength * ( w - min weight ) ; final float fx = inter * n . x ; final float fy = inter * n . y ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } } <SENTENCE_END/>

(Copy Probability: 11.3%)

<SENTENCE_START> { float powder strength = m powder strength * get critical velocity ( step ) ; float min weight = 1.0f - settings . particle stride ; for ( int k = 0 ; k < m body contact count ; k ++ ) { final particle body contact contact = m body contact buffer [ k ] ; int a = contact . index ; if ( ( m flags buffer . data [ a ] & particle type . b 2 powder particle ) != 0 ) { float w = contact . weight ; if ( w > min weight ) { body b = contact . body ; float m = contact . mass ; vec 2 p = m position buffer . data [ a ] ; vec 2 n = contact . normal ; final vec 2 f = temp vec ; final vec 2 va = m velocity buffer . data [ a ] ; final float inter = powder strength * m * ( w - min weight ) ; final float p inv mass = get particle inv mass ( ) ; f . x = inter * n . x ; f . y = inter * n . y ; va . x -= p inv mass * f . x ; va . y -= p inv mass * f . y ; b . apply linear impulse ( f , p , true ) ; } } } for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; if ( ( contact . flags & particle type . b 2 powder particle ) != 0 ) { float w = contact . weight ; if ( w > min weight ) { int a = contact . index a ; int b = contact . index b ; vec 2 n = contact . normal ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float inter = powder strength * ( w - min weight ) ; final float fx = inter * n . x ; final float fy = inter * n . y ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } } <SENTENCE_END/>
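
A minimal sketch of the particle-particle half of the powder pass above: contacts whose weight exceeds a stride-derived threshold are pushed apart along the contact normal, which keeps granular particles from clumping. The body-contact half applies the same force against the touching rigid body and is omitted here. Names are illustrative assumptions.

// Hedged sketch of the powder separation over particle-particle contacts only.
final class PowderPassSketch {
    static void solvePowder(float[] velX, float[] velY,
                            int[] contactA, int[] contactB,
                            float[] weight, float[] normalX, float[] normalY,
                            float powderStrength, float criticalVelocity,
                            float particleStride) {
        float strength = powderStrength * criticalVelocity;
        float minWeight = 1.0f - particleStride;   // only act on strongly overlapping contacts
        for (int k = 0; k < contactA.length; k++) {
            float w = weight[k];
            if (w <= minWeight) continue;
            int a = contactA[k], b = contactB[k];
            float inter = strength * (w - minWeight);
            float fx = inter * normalX[k], fy = inter * normalY[k];
            velX[a] -= fx; velY[a] -= fy;          // separate the pair
            velX[b] += fx; velY[b] += fy;
        }
    }
}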


Original Name solve,solid

solve

<SENTENCE_START> { m depth buffer = request particle buffer ( m depth buffer ) ; float ejection strength = step . inv dt * m ejection strength ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( m group buffer [ a ] != m group buffer [ b ] ) { float w = contact . weight ; vec 2 n = contact . normal ; float h = m depth buffer [ a ] + m depth buffer [ b ] ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float inter = ejection strength * h * w ; final float fx = inter * n . x ; final float fy = inter * n . y ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>

(Copy Probability: 3.2%)

<SENTENCE_START> { m depth buffer = request particle buffer ( m depth buffer ) ; float ejection strength = step . inv dt * m ejection strength ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( m group buffer [ a ] != m group buffer [ b ] ) { float w = contact . weight ; vec 2 n = contact . normal ; float h = m depth buffer [ a ] + m depth buffer [ b ] ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float inter = ejection strength * h * w ; final float fx = inter * n . x ; final float fy = inter * n . y ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>

solid

<SENTENCE_START> { m depth buffer = request particle buffer ( m depth buffer ) ; float ejection strength = step . inv dt * m ejection strength ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( m group buffer [ a ] != m group buffer [ b ] ) { float w = contact . weight ; vec 2 n = contact . normal ; float h = m depth buffer [ a ] + m depth buffer [ b ] ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float inter = ejection strength * h * w ; final float fx = inter * n . x ; final float fy = inter * n . y ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>

(Copy Probability: 40.1%)

<SENTENCE_START> { m depth buffer = request particle buffer ( m depth buffer ) ; float ejection strength = step . inv dt * m ejection strength ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( m group buffer [ a ] != m group buffer [ b ] ) { float w = contact . weight ; vec 2 n = contact . normal ; float h = m depth buffer [ a ] + m depth buffer [ b ] ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float inter = ejection strength * h * w ; final float fx = inter * n . x ; final float fy = inter * n . y ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>

%END%

<SENTENCE_START> { m depth buffer = request particle buffer ( m depth buffer ) ; float ejection strength = step . inv dt * m ejection strength ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( m group buffer [ a ] != m group buffer [ b ] ) { float w = contact . weight ; vec 2 n = contact . normal ; float h = m depth buffer [ a ] + m depth buffer [ b ] ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float inter = ejection strength * h * w ; final float fx = inter * n . x ; final float fy = inter * n . y ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>

(Copy Probability: 20.0%)

<SENTENCE_START> { m depth buffer = request particle buffer ( m depth buffer ) ; float ejection strength = step . inv dt * m ejection strength ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( m group buffer [ a ] != m group buffer [ b ] ) { float w = contact . weight ; vec 2 n = contact . normal ; float h = m depth buffer [ a ] + m depth buffer [ b ] ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float inter = ejection strength * h * w ; final float fx = inter * n . x ; final float fy = inter * n . y ; va . x -= fx ; va . y -= fy ; vb . x += fx ; vb . y += fy ; } } } <SENTENCE_END/>
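
A minimal sketch of the solid pass above: particles that are in contact but belong to different groups are ejected apart along the contact normal, scaled by how deep inside their respective groups they sit (the depth buffer) and by the contact weight. Names are illustrative assumptions; groups are compared here by an integer id rather than by object reference.

// Hedged sketch of the cross-group ejection step.
final class SolidPassSketch {
    static void solveSolid(float[] velX, float[] velY, float[] depth, int[] groupId,
                           int[] contactA, int[] contactB,
                           float[] weight, float[] normalX, float[] normalY,
                           float invDt, float ejectionStrength) {
        float strength = invDt * ejectionStrength;
        for (int k = 0; k < contactA.length; k++) {
            int a = contactA[k], b = contactB[k];
            if (groupId[a] == groupId[b]) continue;   // only eject across group boundaries
            float h = depth[a] + depth[b];
            float inter = strength * h * weight[k];
            float fx = inter * normalX[k], fy = inter * normalY[k];
            velX[a] -= fx; velY[a] -= fy;
            velX[b] += fx; velY[b] += fy;
        }
    }
}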


Original Name solve,color,mixing

solve

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; int color mixing 256 = ( int ) ( 256 * m color mixing strength ) ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( ( m flags buffer . data [ a ] & m flags buffer . data [ b ] & particle type . b 2 color mixing particle ) != 0 ) { particle color color a = m color buffer . data [ a ] ; particle color color b = m color buffer . data [ b ] ; int dr = ( color mixing 256 * ( color b . r - color a . r ) ) >> 8 ; int dg = ( color mixing 256 * ( color b . g - color a . g ) ) >> 8 ; int db = ( color mixing 256 * ( color b . b - color a . b ) ) >> 8 ; int da = ( color mixing 256 * ( color b . a - color a . a ) ) >> 8 ; color a . r += dr ; color a . g += dg ; color a . b += db ; color a . a += da ; color b . r -= dr ; color b . g -= dg ; color b . b -= db ; color b . a -= da ; } } } <SENTENCE_END/>

(Copy Probability: 3.1%)

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; int color mixing 256 = ( int ) ( 256 * m color mixing strength ) ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( ( m flags buffer . data [ a ] & m flags buffer . data [ b ] & particle type . b 2 color mixing particle ) != 0 ) { particle color color a = m color buffer . data [ a ] ; particle color color b = m color buffer . data [ b ] ; int dr = ( color mixing 256 * ( color b . r - color a . r ) ) >> 8 ; int dg = ( color mixing 256 * ( color b . g - color a . g ) ) >> 8 ; int db = ( color mixing 256 * ( color b . b - color a . b ) ) >> 8 ; int da = ( color mixing 256 * ( color b . a - color a . a ) ) >> 8 ; color a . r += dr ; color a . g += dg ; color a . b += db ; color a . a += da ; color b . r -= dr ; color b . g -= dg ; color b . b -= db ; color b . a -= da ; } } } <SENTENCE_END/>

color

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; int color mixing 256 = ( int ) ( 256 * m color mixing strength ) ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( ( m flags buffer . data [ a ] & m flags buffer . data [ b ] & particle type . b 2 color mixing particle ) != 0 ) { particle color color a = m color buffer . data [ a ] ; particle color color b = m color buffer . data [ b ] ; int dr = ( color mixing 256 * ( color b . r - color a . r ) ) >> 8 ; int dg = ( color mixing 256 * ( color b . g - color a . g ) ) >> 8 ; int db = ( color mixing 256 * ( color b . b - color a . b ) ) >> 8 ; int da = ( color mixing 256 * ( color b . a - color a . a ) ) >> 8 ; color a . r += dr ; color a . g += dg ; color a . b += db ; color a . a += da ; color b . r -= dr ; color b . g -= dg ; color b . b -= db ; color b . a -= da ; } } } <SENTENCE_END/>

(Copy Probability: 9.6%)

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; int color mixing 256 = ( int ) ( 256 * m color mixing strength ) ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( ( m flags buffer . data [ a ] & m flags buffer . data [ b ] & particle type . b 2 color mixing particle ) != 0 ) { particle color color a = m color buffer . data [ a ] ; particle color color b = m color buffer . data [ b ] ; int dr = ( color mixing 256 * ( color b . r - color a . r ) ) >> 8 ; int dg = ( color mixing 256 * ( color b . g - color a . g ) ) >> 8 ; int db = ( color mixing 256 * ( color b . b - color a . b ) ) >> 8 ; int da = ( color mixing 256 * ( color b . a - color a . a ) ) >> 8 ; color a . r += dr ; color a . g += dg ; color a . b += db ; color a . a += da ; color b . r -= dr ; color b . g -= dg ; color b . b -= db ; color b . a -= da ; } } } <SENTENCE_END/>

mixing

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; int color mixing 256 = ( int ) ( 256 * m color mixing strength ) ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( ( m flags buffer . data [ a ] & m flags buffer . data [ b ] & particle type . b 2 color mixing particle ) != 0 ) { particle color color a = m color buffer . data [ a ] ; particle color color b = m color buffer . data [ b ] ; int dr = ( color mixing 256 * ( color b . r - color a . r ) ) >> 8 ; int dg = ( color mixing 256 * ( color b . g - color a . g ) ) >> 8 ; int db = ( color mixing 256 * ( color b . b - color a . b ) ) >> 8 ; int da = ( color mixing 256 * ( color b . a - color a . a ) ) >> 8 ; color a . r += dr ; color a . g += dg ; color a . b += db ; color a . a += da ; color b . r -= dr ; color b . g -= dg ; color b . b -= db ; color b . a -= da ; } } } <SENTENCE_END/>

(Copy Probability: 13.2%)

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; int color mixing 256 = ( int ) ( 256 * m color mixing strength ) ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( ( m flags buffer . data [ a ] & m flags buffer . data [ b ] & particle type . b 2 color mixing particle ) != 0 ) { particle color color a = m color buffer . data [ a ] ; particle color color b = m color buffer . data [ b ] ; int dr = ( color mixing 256 * ( color b . r - color a . r ) ) >> 8 ; int dg = ( color mixing 256 * ( color b . g - color a . g ) ) >> 8 ; int db = ( color mixing 256 * ( color b . b - color a . b ) ) >> 8 ; int da = ( color mixing 256 * ( color b . a - color a . a ) ) >> 8 ; color a . r += dr ; color a . g += dg ; color a . b += db ; color a . a += da ; color b . r -= dr ; color b . g -= dg ; color b . b -= db ; color b . a -= da ; } } } <SENTENCE_END/>

%END%

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; int color mixing 256 = ( int ) ( 256 * m color mixing strength ) ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( ( m flags buffer . data [ a ] & m flags buffer . data [ b ] & particle type . b 2 color mixing particle ) != 0 ) { particle color color a = m color buffer . data [ a ] ; particle color color b = m color buffer . data [ b ] ; int dr = ( color mixing 256 * ( color b . r - color a . r ) ) >> 8 ; int dg = ( color mixing 256 * ( color b . g - color a . g ) ) >> 8 ; int db = ( color mixing 256 * ( color b . b - color a . b ) ) >> 8 ; int da = ( color mixing 256 * ( color b . a - color a . a ) ) >> 8 ; color a . r += dr ; color a . g += dg ; color a . b += db ; color a . a += da ; color b . r -= dr ; color b . g -= dg ; color b . b -= db ; color b . a -= da ; } } } <SENTENCE_END/>

(Copy Probability: 6.5%)

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; int color mixing 256 = ( int ) ( 256 * m color mixing strength ) ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; if ( ( m flags buffer . data [ a ] & m flags buffer . data [ b ] & particle type . b 2 color mixing particle ) != 0 ) { particle color color a = m color buffer . data [ a ] ; particle color color b = m color buffer . data [ b ] ; int dr = ( color mixing 256 * ( color b . r - color a . r ) ) >> 8 ; int dg = ( color mixing 256 * ( color b . g - color a . g ) ) >> 8 ; int db = ( color mixing 256 * ( color b . b - color a . b ) ) >> 8 ; int da = ( color mixing 256 * ( color b . a - color a . a ) ) >> 8 ; color a . r += dr ; color a . g += dg ; color a . b += db ; color a . a += da ; color b . r -= dr ; color b . g -= dg ; color b . b -= db ; color b . a -= da ; } } } <SENTENCE_END/>
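
A minimal sketch of the colour-mixing pass above: for every contact between two mixing particles, a fixed-point fraction of the per-channel colour difference (the strength scaled into a /256 integer) is transferred from one particle to the other, so the channel totals are conserved. Names are illustrative assumptions; channels are stored here as ints in 0..255.

// Hedged sketch of fixed-point colour blending over contacts.
final class ColorMixSketch {
    static void mix(int[][] rgba, int[] contactA, int[] contactB, float mixingStrength) {
        int mix256 = (int) (256 * mixingStrength);
        for (int k = 0; k < contactA.length; k++) {
            int[] ca = rgba[contactA[k]];
            int[] cb = rgba[contactB[k]];
            for (int c = 0; c < 4; c++) {                 // r, g, b, a channels
                int d = (mix256 * (cb[c] - ca[c])) >> 8;  // fraction of the channel difference
                ca[c] += d;                                // move colour toward the partner
                cb[c] -= d;                                // keep the pair's total unchanged
            }
        }
    }
}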


Original Name solve,zombie

solve

<SENTENCE_START> { int new count = 0 ; int [ ] new indices = new int [ m count ] ; for ( int i = 0 ; i < m count ; i ++ ) { int flags = m flags buffer . data [ i ] ; if ( ( flags & particle type . b 2 zombie particle ) != 0 ) { particle destruction listener destruction listener = m world . get particle destruction listener ( ) ; if ( ( flags & particle type . b 2 destruction listener ) != 0 && destruction listener != null ) { destruction listener . say goodbye ( i ) ; } new indices [ i ] = settings . invalid particle index ; } else { new indices [ i ] = new count ; if ( i != new count ) { m flags buffer . data [ new count ] = m flags buffer . data [ i ] ; m position buffer . data [ new count ] . set ( m position buffer . data [ i ] ) ; m velocity buffer . data [ new count ] . set ( m velocity buffer . data [ i ] ) ; m group buffer [ new count ] = m group buffer [ i ] ; if ( m depth buffer != null ) { m depth buffer [ new count ] = m depth buffer [ i ] ; } if ( m color buffer . data != null ) { m color buffer . data [ new count ] . set ( m color buffer . data [ i ] ) ; } if ( m user data buffer . data != null ) { m user data buffer . data [ new count ] = m user data buffer . data [ i ] ; } } new count ++ ; } } for ( int k = 0 ; k < m proxy count ; k ++ ) { proxy proxy = m proxy buffer [ k ] ; proxy . index = new indices [ proxy . index ] ; } int j = m proxy count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is proxy invalid ( m proxy buffer [ i ] ) ) { -- j ; proxy temp = m proxy buffer [ j ] ; m proxy buffer [ j ] = m proxy buffer [ i ] ; m proxy buffer [ i ] = temp ; -- i ; } } m proxy count = j ; for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; contact . index a = new indices [ contact . index a ] ; contact . index b = new indices [ contact . index b ] ; } j = m contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is contact invalid ( m contact buffer [ i ] ) ) { -- j ; particle contact temp = m contact buffer [ j ] ; m contact buffer [ j ] = m contact buffer [ i ] ; m contact buffer [ i ] = temp ; -- i ; } } m contact count = j ; for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; contact . index = new indices [ contact . index ] ; } j = m body contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is body contact invalid ( m body contact buffer [ i ] ) ) { -- j ; particle body contact temp = m body contact buffer [ j ] ; m body contact buffer [ j ] = m body contact buffer [ i ] ; m body contact buffer [ i ] = temp ; -- i ; } } m body contact count = j ; for ( int k = 0 ; k < m pair count ; k ++ ) { pair pair = m pair buffer [ k ] ; pair . index a = new indices [ pair . index a ] ; pair . index b = new indices [ pair . index b ] ; } j = m pair count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is pair invalid ( m pair buffer [ i ] ) ) { -- j ; pair temp = m pair buffer [ j ] ; m pair buffer [ j ] = m pair buffer [ i ] ; m pair buffer [ i ] = temp ; -- i ; } } m pair count = j ; for ( int k = 0 ; k < m triad count ; k ++ ) { triad triad = m triad buffer [ k ] ; triad . index a = new indices [ triad . index a ] ; triad . index b = new indices [ triad . index b ] ; triad . index c = new indices [ triad . index c ] ; } j = m triad count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . 
is triad invalid ( m triad buffer [ i ] ) ) { -- j ; triad temp = m triad buffer [ j ] ; m triad buffer [ j ] = m triad buffer [ i ] ; m triad buffer [ i ] = temp ; -- i ; } } m triad count = j ; for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { int first index = new count ; int last index = 0 ; boolean modified = false ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { j = new indices [ i ] ; if ( j >= 0 ) { first index = math utils . min ( first index , j ) ; last index = math utils . max ( last index , j + 1 ) ; } else { modified = true ; } } if ( first index < last index ) { group . m first index = first index ; group . m last index = last index ; if ( modified ) { if ( ( group . m group flags & particle group type . b 2 rigid particle group ) != 0 ) { group . m to be split = true ; } } } else { group . m first index = 0 ; group . m last index = 0 ; if ( group . m destroy automatically ) { group . m to be destroyed = true ; } } } m count = new count ; for ( particle group group = m group list ; group != null ; ) { particle group next = group . get next ( ) ; if ( group . m to be destroyed ) { destroy particle group ( group ) ; } else if ( group . m to be split ) { } group = next ; } } <SENTENCE_END/>

(Copy Probability: 3.8%)

<SENTENCE_START> { int new count = 0 ; int [ ] new indices = new int [ m count ] ; for ( int i = 0 ; i < m count ; i ++ ) { int flags = m flags buffer . data [ i ] ; if ( ( flags & particle type . b 2 zombie particle ) != 0 ) { particle destruction listener destruction listener = m world . get particle destruction listener ( ) ; if ( ( flags & particle type . b 2 destruction listener ) != 0 && destruction listener != null ) { destruction listener . say goodbye ( i ) ; } new indices [ i ] = settings . invalid particle index ; } else { new indices [ i ] = new count ; if ( i != new count ) { m flags buffer . data [ new count ] = m flags buffer . data [ i ] ; m position buffer . data [ new count ] . set ( m position buffer . data [ i ] ) ; m velocity buffer . data [ new count ] . set ( m velocity buffer . data [ i ] ) ; m group buffer [ new count ] = m group buffer [ i ] ; if ( m depth buffer != null ) { m depth buffer [ new count ] = m depth buffer [ i ] ; } if ( m color buffer . data != null ) { m color buffer . data [ new count ] . set ( m color buffer . data [ i ] ) ; } if ( m user data buffer . data != null ) { m user data buffer . data [ new count ] = m user data buffer . data [ i ] ; } } new count ++ ; } } for ( int k = 0 ; k < m proxy count ; k ++ ) { proxy proxy = m proxy buffer [ k ] ; proxy . index = new indices [ proxy . index ] ; } int j = m proxy count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is proxy invalid ( m proxy buffer [ i ] ) ) { -- j ; proxy temp = m proxy buffer [ j ] ; m proxy buffer [ j ] = m proxy buffer [ i ] ; m proxy buffer [ i ] = temp ; -- i ; } } m proxy count = j ; for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; contact . index a = new indices [ contact . index a ] ; contact . index b = new indices [ contact . index b ] ; } j = m contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is contact invalid ( m contact buffer [ i ] ) ) { -- j ; particle contact temp = m contact buffer [ j ] ; m contact buffer [ j ] = m contact buffer [ i ] ; m contact buffer [ i ] = temp ; -- i ; } } m contact count = j ; for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; contact . index = new indices [ contact . index ] ; } j = m body contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is body contact invalid ( m body contact buffer [ i ] ) ) { -- j ; particle body contact temp = m body contact buffer [ j ] ; m body contact buffer [ j ] = m body contact buffer [ i ] ; m body contact buffer [ i ] = temp ; -- i ; } } m body contact count = j ; for ( int k = 0 ; k < m pair count ; k ++ ) { pair pair = m pair buffer [ k ] ; pair . index a = new indices [ pair . index a ] ; pair . index b = new indices [ pair . index b ] ; } j = m pair count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is pair invalid ( m pair buffer [ i ] ) ) { -- j ; pair temp = m pair buffer [ j ] ; m pair buffer [ j ] = m pair buffer [ i ] ; m pair buffer [ i ] = temp ; -- i ; } } m pair count = j ; for ( int k = 0 ; k < m triad count ; k ++ ) { triad triad = m triad buffer [ k ] ; triad . index a = new indices [ triad . index a ] ; triad . index b = new indices [ triad . index b ] ; triad . index c = new indices [ triad . index c ] ; } j = m triad count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . 
is triad invalid ( m triad buffer [ i ] ) ) { -- j ; triad temp = m triad buffer [ j ] ; m triad buffer [ j ] = m triad buffer [ i ] ; m triad buffer [ i ] = temp ; -- i ; } } m triad count = j ; for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { int first index = new count ; int last index = 0 ; boolean modified = false ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { j = new indices [ i ] ; if ( j >= 0 ) { first index = math utils . min ( first index , j ) ; last index = math utils . max ( last index , j + 1 ) ; } else { modified = true ; } } if ( first index < last index ) { group . m first index = first index ; group . m last index = last index ; if ( modified ) { if ( ( group . m group flags & particle group type . b 2 rigid particle group ) != 0 ) { group . m to be split = true ; } } } else { group . m first index = 0 ; group . m last index = 0 ; if ( group . m destroy automatically ) { group . m to be destroyed = true ; } } } m count = new count ; for ( particle group group = m group list ; group != null ; ) { particle group next = group . get next ( ) ; if ( group . m to be destroyed ) { destroy particle group ( group ) ; } else if ( group . m to be split ) { } group = next ; } } <SENTENCE_END/>

zombie

<SENTENCE_START> { int new count = 0 ; int [ ] new indices = new int [ m count ] ; for ( int i = 0 ; i < m count ; i ++ ) { int flags = m flags buffer . data [ i ] ; if ( ( flags & particle type . b 2 zombie particle ) != 0 ) { particle destruction listener destruction listener = m world . get particle destruction listener ( ) ; if ( ( flags & particle type . b 2 destruction listener ) != 0 && destruction listener != null ) { destruction listener . say goodbye ( i ) ; } new indices [ i ] = settings . invalid particle index ; } else { new indices [ i ] = new count ; if ( i != new count ) { m flags buffer . data [ new count ] = m flags buffer . data [ i ] ; m position buffer . data [ new count ] . set ( m position buffer . data [ i ] ) ; m velocity buffer . data [ new count ] . set ( m velocity buffer . data [ i ] ) ; m group buffer [ new count ] = m group buffer [ i ] ; if ( m depth buffer != null ) { m depth buffer [ new count ] = m depth buffer [ i ] ; } if ( m color buffer . data != null ) { m color buffer . data [ new count ] . set ( m color buffer . data [ i ] ) ; } if ( m user data buffer . data != null ) { m user data buffer . data [ new count ] = m user data buffer . data [ i ] ; } } new count ++ ; } } for ( int k = 0 ; k < m proxy count ; k ++ ) { proxy proxy = m proxy buffer [ k ] ; proxy . index = new indices [ proxy . index ] ; } int j = m proxy count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is proxy invalid ( m proxy buffer [ i ] ) ) { -- j ; proxy temp = m proxy buffer [ j ] ; m proxy buffer [ j ] = m proxy buffer [ i ] ; m proxy buffer [ i ] = temp ; -- i ; } } m proxy count = j ; for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; contact . index a = new indices [ contact . index a ] ; contact . index b = new indices [ contact . index b ] ; } j = m contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is contact invalid ( m contact buffer [ i ] ) ) { -- j ; particle contact temp = m contact buffer [ j ] ; m contact buffer [ j ] = m contact buffer [ i ] ; m contact buffer [ i ] = temp ; -- i ; } } m contact count = j ; for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; contact . index = new indices [ contact . index ] ; } j = m body contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is body contact invalid ( m body contact buffer [ i ] ) ) { -- j ; particle body contact temp = m body contact buffer [ j ] ; m body contact buffer [ j ] = m body contact buffer [ i ] ; m body contact buffer [ i ] = temp ; -- i ; } } m body contact count = j ; for ( int k = 0 ; k < m pair count ; k ++ ) { pair pair = m pair buffer [ k ] ; pair . index a = new indices [ pair . index a ] ; pair . index b = new indices [ pair . index b ] ; } j = m pair count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is pair invalid ( m pair buffer [ i ] ) ) { -- j ; pair temp = m pair buffer [ j ] ; m pair buffer [ j ] = m pair buffer [ i ] ; m pair buffer [ i ] = temp ; -- i ; } } m pair count = j ; for ( int k = 0 ; k < m triad count ; k ++ ) { triad triad = m triad buffer [ k ] ; triad . index a = new indices [ triad . index a ] ; triad . index b = new indices [ triad . index b ] ; triad . index c = new indices [ triad . index c ] ; } j = m triad count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . 
is triad invalid ( m triad buffer [ i ] ) ) { -- j ; triad temp = m triad buffer [ j ] ; m triad buffer [ j ] = m triad buffer [ i ] ; m triad buffer [ i ] = temp ; -- i ; } } m triad count = j ; for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { int first index = new count ; int last index = 0 ; boolean modified = false ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { j = new indices [ i ] ; if ( j >= 0 ) { first index = math utils . min ( first index , j ) ; last index = math utils . max ( last index , j + 1 ) ; } else { modified = true ; } } if ( first index < last index ) { group . m first index = first index ; group . m last index = last index ; if ( modified ) { if ( ( group . m group flags & particle group type . b 2 rigid particle group ) != 0 ) { group . m to be split = true ; } } } else { group . m first index = 0 ; group . m last index = 0 ; if ( group . m destroy automatically ) { group . m to be destroyed = true ; } } } m count = new count ; for ( particle group group = m group list ; group != null ; ) { particle group next = group . get next ( ) ; if ( group . m to be destroyed ) { destroy particle group ( group ) ; } else if ( group . m to be split ) { } group = next ; } } <SENTENCE_END/>

(Copy Probability: 10.3%)

<SENTENCE_START> { int new count = 0 ; int [ ] new indices = new int [ m count ] ; for ( int i = 0 ; i < m count ; i ++ ) { int flags = m flags buffer . data [ i ] ; if ( ( flags & particle type . b 2 zombie particle ) != 0 ) { particle destruction listener destruction listener = m world . get particle destruction listener ( ) ; if ( ( flags & particle type . b 2 destruction listener ) != 0 && destruction listener != null ) { destruction listener . say goodbye ( i ) ; } new indices [ i ] = settings . invalid particle index ; } else { new indices [ i ] = new count ; if ( i != new count ) { m flags buffer . data [ new count ] = m flags buffer . data [ i ] ; m position buffer . data [ new count ] . set ( m position buffer . data [ i ] ) ; m velocity buffer . data [ new count ] . set ( m velocity buffer . data [ i ] ) ; m group buffer [ new count ] = m group buffer [ i ] ; if ( m depth buffer != null ) { m depth buffer [ new count ] = m depth buffer [ i ] ; } if ( m color buffer . data != null ) { m color buffer . data [ new count ] . set ( m color buffer . data [ i ] ) ; } if ( m user data buffer . data != null ) { m user data buffer . data [ new count ] = m user data buffer . data [ i ] ; } } new count ++ ; } } for ( int k = 0 ; k < m proxy count ; k ++ ) { proxy proxy = m proxy buffer [ k ] ; proxy . index = new indices [ proxy . index ] ; } int j = m proxy count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is proxy invalid ( m proxy buffer [ i ] ) ) { -- j ; proxy temp = m proxy buffer [ j ] ; m proxy buffer [ j ] = m proxy buffer [ i ] ; m proxy buffer [ i ] = temp ; -- i ; } } m proxy count = j ; for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; contact . index a = new indices [ contact . index a ] ; contact . index b = new indices [ contact . index b ] ; } j = m contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is contact invalid ( m contact buffer [ i ] ) ) { -- j ; particle contact temp = m contact buffer [ j ] ; m contact buffer [ j ] = m contact buffer [ i ] ; m contact buffer [ i ] = temp ; -- i ; } } m contact count = j ; for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; contact . index = new indices [ contact . index ] ; } j = m body contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is body contact invalid ( m body contact buffer [ i ] ) ) { -- j ; particle body contact temp = m body contact buffer [ j ] ; m body contact buffer [ j ] = m body contact buffer [ i ] ; m body contact buffer [ i ] = temp ; -- i ; } } m body contact count = j ; for ( int k = 0 ; k < m pair count ; k ++ ) { pair pair = m pair buffer [ k ] ; pair . index a = new indices [ pair . index a ] ; pair . index b = new indices [ pair . index b ] ; } j = m pair count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is pair invalid ( m pair buffer [ i ] ) ) { -- j ; pair temp = m pair buffer [ j ] ; m pair buffer [ j ] = m pair buffer [ i ] ; m pair buffer [ i ] = temp ; -- i ; } } m pair count = j ; for ( int k = 0 ; k < m triad count ; k ++ ) { triad triad = m triad buffer [ k ] ; triad . index a = new indices [ triad . index a ] ; triad . index b = new indices [ triad . index b ] ; triad . index c = new indices [ triad . index c ] ; } j = m triad count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . 
is triad invalid ( m triad buffer [ i ] ) ) { -- j ; triad temp = m triad buffer [ j ] ; m triad buffer [ j ] = m triad buffer [ i ] ; m triad buffer [ i ] = temp ; -- i ; } } m triad count = j ; for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { int first index = new count ; int last index = 0 ; boolean modified = false ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { j = new indices [ i ] ; if ( j >= 0 ) { first index = math utils . min ( first index , j ) ; last index = math utils . max ( last index , j + 1 ) ; } else { modified = true ; } } if ( first index < last index ) { group . m first index = first index ; group . m last index = last index ; if ( modified ) { if ( ( group . m group flags & particle group type . b 2 rigid particle group ) != 0 ) { group . m to be split = true ; } } } else { group . m first index = 0 ; group . m last index = 0 ; if ( group . m destroy automatically ) { group . m to be destroyed = true ; } } } m count = new count ; for ( particle group group = m group list ; group != null ; ) { particle group next = group . get next ( ) ; if ( group . m to be destroyed ) { destroy particle group ( group ) ; } else if ( group . m to be split ) { } group = next ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { int new count = 0 ; int [ ] new indices = new int [ m count ] ; for ( int i = 0 ; i < m count ; i ++ ) { int flags = m flags buffer . data [ i ] ; if ( ( flags & particle type . b 2 zombie particle ) != 0 ) { particle destruction listener destruction listener = m world . get particle destruction listener ( ) ; if ( ( flags & particle type . b 2 destruction listener ) != 0 && destruction listener != null ) { destruction listener . say goodbye ( i ) ; } new indices [ i ] = settings . invalid particle index ; } else { new indices [ i ] = new count ; if ( i != new count ) { m flags buffer . data [ new count ] = m flags buffer . data [ i ] ; m position buffer . data [ new count ] . set ( m position buffer . data [ i ] ) ; m velocity buffer . data [ new count ] . set ( m velocity buffer . data [ i ] ) ; m group buffer [ new count ] = m group buffer [ i ] ; if ( m depth buffer != null ) { m depth buffer [ new count ] = m depth buffer [ i ] ; } if ( m color buffer . data != null ) { m color buffer . data [ new count ] . set ( m color buffer . data [ i ] ) ; } if ( m user data buffer . data != null ) { m user data buffer . data [ new count ] = m user data buffer . data [ i ] ; } } new count ++ ; } } for ( int k = 0 ; k < m proxy count ; k ++ ) { proxy proxy = m proxy buffer [ k ] ; proxy . index = new indices [ proxy . index ] ; } int j = m proxy count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is proxy invalid ( m proxy buffer [ i ] ) ) { -- j ; proxy temp = m proxy buffer [ j ] ; m proxy buffer [ j ] = m proxy buffer [ i ] ; m proxy buffer [ i ] = temp ; -- i ; } } m proxy count = j ; for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; contact . index a = new indices [ contact . index a ] ; contact . index b = new indices [ contact . index b ] ; } j = m contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is contact invalid ( m contact buffer [ i ] ) ) { -- j ; particle contact temp = m contact buffer [ j ] ; m contact buffer [ j ] = m contact buffer [ i ] ; m contact buffer [ i ] = temp ; -- i ; } } m contact count = j ; for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; contact . index = new indices [ contact . index ] ; } j = m body contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is body contact invalid ( m body contact buffer [ i ] ) ) { -- j ; particle body contact temp = m body contact buffer [ j ] ; m body contact buffer [ j ] = m body contact buffer [ i ] ; m body contact buffer [ i ] = temp ; -- i ; } } m body contact count = j ; for ( int k = 0 ; k < m pair count ; k ++ ) { pair pair = m pair buffer [ k ] ; pair . index a = new indices [ pair . index a ] ; pair . index b = new indices [ pair . index b ] ; } j = m pair count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is pair invalid ( m pair buffer [ i ] ) ) { -- j ; pair temp = m pair buffer [ j ] ; m pair buffer [ j ] = m pair buffer [ i ] ; m pair buffer [ i ] = temp ; -- i ; } } m pair count = j ; for ( int k = 0 ; k < m triad count ; k ++ ) { triad triad = m triad buffer [ k ] ; triad . index a = new indices [ triad . index a ] ; triad . index b = new indices [ triad . index b ] ; triad . index c = new indices [ triad . index c ] ; } j = m triad count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . 
is triad invalid ( m triad buffer [ i ] ) ) { -- j ; triad temp = m triad buffer [ j ] ; m triad buffer [ j ] = m triad buffer [ i ] ; m triad buffer [ i ] = temp ; -- i ; } } m triad count = j ; for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { int first index = new count ; int last index = 0 ; boolean modified = false ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { j = new indices [ i ] ; if ( j >= 0 ) { first index = math utils . min ( first index , j ) ; last index = math utils . max ( last index , j + 1 ) ; } else { modified = true ; } } if ( first index < last index ) { group . m first index = first index ; group . m last index = last index ; if ( modified ) { if ( ( group . m group flags & particle group type . b 2 rigid particle group ) != 0 ) { group . m to be split = true ; } } } else { group . m first index = 0 ; group . m last index = 0 ; if ( group . m destroy automatically ) { group . m to be destroyed = true ; } } } m count = new count ; for ( particle group group = m group list ; group != null ; ) { particle group next = group . get next ( ) ; if ( group . m to be destroyed ) { destroy particle group ( group ) ; } else if ( group . m to be split ) { } group = next ; } } <SENTENCE_END/>

(Copy Probability: 8.1%)

<SENTENCE_START> { int new count = 0 ; int [ ] new indices = new int [ m count ] ; for ( int i = 0 ; i < m count ; i ++ ) { int flags = m flags buffer . data [ i ] ; if ( ( flags & particle type . b 2 zombie particle ) != 0 ) { particle destruction listener destruction listener = m world . get particle destruction listener ( ) ; if ( ( flags & particle type . b 2 destruction listener ) != 0 && destruction listener != null ) { destruction listener . say goodbye ( i ) ; } new indices [ i ] = settings . invalid particle index ; } else { new indices [ i ] = new count ; if ( i != new count ) { m flags buffer . data [ new count ] = m flags buffer . data [ i ] ; m position buffer . data [ new count ] . set ( m position buffer . data [ i ] ) ; m velocity buffer . data [ new count ] . set ( m velocity buffer . data [ i ] ) ; m group buffer [ new count ] = m group buffer [ i ] ; if ( m depth buffer != null ) { m depth buffer [ new count ] = m depth buffer [ i ] ; } if ( m color buffer . data != null ) { m color buffer . data [ new count ] . set ( m color buffer . data [ i ] ) ; } if ( m user data buffer . data != null ) { m user data buffer . data [ new count ] = m user data buffer . data [ i ] ; } } new count ++ ; } } for ( int k = 0 ; k < m proxy count ; k ++ ) { proxy proxy = m proxy buffer [ k ] ; proxy . index = new indices [ proxy . index ] ; } int j = m proxy count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is proxy invalid ( m proxy buffer [ i ] ) ) { -- j ; proxy temp = m proxy buffer [ j ] ; m proxy buffer [ j ] = m proxy buffer [ i ] ; m proxy buffer [ i ] = temp ; -- i ; } } m proxy count = j ; for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; contact . index a = new indices [ contact . index a ] ; contact . index b = new indices [ contact . index b ] ; } j = m contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is contact invalid ( m contact buffer [ i ] ) ) { -- j ; particle contact temp = m contact buffer [ j ] ; m contact buffer [ j ] = m contact buffer [ i ] ; m contact buffer [ i ] = temp ; -- i ; } } m contact count = j ; for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; contact . index = new indices [ contact . index ] ; } j = m body contact count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is body contact invalid ( m body contact buffer [ i ] ) ) { -- j ; particle body contact temp = m body contact buffer [ j ] ; m body contact buffer [ j ] = m body contact buffer [ i ] ; m body contact buffer [ i ] = temp ; -- i ; } } m body contact count = j ; for ( int k = 0 ; k < m pair count ; k ++ ) { pair pair = m pair buffer [ k ] ; pair . index a = new indices [ pair . index a ] ; pair . index b = new indices [ pair . index b ] ; } j = m pair count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . is pair invalid ( m pair buffer [ i ] ) ) { -- j ; pair temp = m pair buffer [ j ] ; m pair buffer [ j ] = m pair buffer [ i ] ; m pair buffer [ i ] = temp ; -- i ; } } m pair count = j ; for ( int k = 0 ; k < m triad count ; k ++ ) { triad triad = m triad buffer [ k ] ; triad . index a = new indices [ triad . index a ] ; triad . index b = new indices [ triad . index b ] ; triad . index c = new indices [ triad . index c ] ; } j = m triad count ; for ( int i = 0 ; i < j ; i ++ ) { if ( test . 
is triad invalid ( m triad buffer [ i ] ) ) { -- j ; triad temp = m triad buffer [ j ] ; m triad buffer [ j ] = m triad buffer [ i ] ; m triad buffer [ i ] = temp ; -- i ; } } m triad count = j ; for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { int first index = new count ; int last index = 0 ; boolean modified = false ; for ( int i = group . m first index ; i < group . m last index ; i ++ ) { j = new indices [ i ] ; if ( j >= 0 ) { first index = math utils . min ( first index , j ) ; last index = math utils . max ( last index , j + 1 ) ; } else { modified = true ; } } if ( first index < last index ) { group . m first index = first index ; group . m last index = last index ; if ( modified ) { if ( ( group . m group flags & particle group type . b 2 rigid particle group ) != 0 ) { group . m to be split = true ; } } } else { group . m first index = 0 ; group . m last index = 0 ; if ( group . m destroy automatically ) { group . m to be destroyed = true ; } } } m count = new count ; for ( particle group group = m group list ; group != null ; ) { particle group next = group . get next ( ) ; if ( group . m to be destroyed ) { destroy particle group ( group ) ; } else if ( group . m to be split ) { } group = next ; } } <SENTENCE_END/>
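
The long body above (likely jbox2d's ParticleSystem zombie-removal pass) compacts the particle arrays after zombie particles are flagged: survivors are slid to the front, a new indices map records where each old index went, and every structure that stores particle indices (proxies, contacts, body contacts, pairs, triads, groups) is remapped and then purged of invalid entries. A hedged de-tokenization of the recurring purge idiom, with jbox2d-style names assumed:

// Swap-to-end removal over the live prefix [0, j): an invalid entry is swapped
// behind j and the slot is revisited, so the pass stays O(n) with no extra memory.
int j = m_proxyCount;
for (int i = 0; i < j; i++) {
  if (Test.IsProxyInvalid(m_proxyBuffer[i])) {
    --j;
    Proxy temp = m_proxyBuffer[j];
    m_proxyBuffer[j] = m_proxyBuffer[i];
    m_proxyBuffer[i] = temp;
    --i;              // re-check the element that was swapped into slot i
  }
}
m_proxyCount = j;     // entries at index >= j are the discarded ones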


Original Name get,index

get

<SENTENCE_START> { if ( i < start ) { return i ; } else if ( i < mid ) { return i + end - mid ; } else if ( i < end ) { return i + start - mid ; } else { return i ; } } <SENTENCE_END/>

(Copy Probability: 3.0%)

<SENTENCE_START> { if ( i < start ) { return i ; } else if ( i < mid ) { return i + end - mid ; } else if ( i < end ) { return i + start - mid ; } else { return i ; } } <SENTENCE_END/>

index

<SENTENCE_START> { if ( i < start ) { return i ; } else if ( i < mid ) { return i + end - mid ; } else if ( i < end ) { return i + start - mid ; } else { return i ; } } <SENTENCE_END/>

(Copy Probability: 16.0%)

<SENTENCE_START> { if ( i < start ) { return i ; } else if ( i < mid ) { return i + end - mid ; } else if ( i < end ) { return i + start - mid ; } else { return i ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( i < start ) { return i ; } else if ( i < mid ) { return i + end - mid ; } else if ( i < end ) { return i + start - mid ; } else { return i ; } } <SENTENCE_END/>

(Copy Probability: 17.6%)

<SENTENCE_START> { if ( i < start ) { return i ; } else if ( i < mid ) { return i + end - mid ; } else if ( i < end ) { return i + start - mid ; } else { return i ; } } <SENTENCE_END/>
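
The get,index body above is the index-remapping helper used after a buffer rotation: given the rotation window [start, mid, end), it returns where element i lands. A de-tokenized reading, with the surrounding field names assumed:

int getIndex(int i) {
  if (i < start) {
    return i;                 // before the window: unchanged
  } else if (i < mid) {
    return i + end - mid;     // [start, mid) block moves to the back of the window
  } else if (i < end) {
    return i + start - mid;   // [mid, end) block moves to the front
  } else {
    return i;                 // after the window: unchanged
  }
}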


Original Name rotate,buffer

rotate

<SENTENCE_START> { if ( start == mid || mid == end ) { return ; } new indices . start = start ; new indices . mid = mid ; new indices . end = end ; buffer utils . rotate ( m flags buffer . data , start , mid , end ) ; buffer utils . rotate ( m position buffer . data , start , mid , end ) ; buffer utils . rotate ( m velocity buffer . data , start , mid , end ) ; buffer utils . rotate ( m group buffer , start , mid , end ) ; if ( m depth buffer != null ) { buffer utils . rotate ( m depth buffer , start , mid , end ) ; } if ( m color buffer . data != null ) { buffer utils . rotate ( m color buffer . data , start , mid , end ) ; } if ( m user data buffer . data != null ) { buffer utils . rotate ( m user data buffer . data , start , mid , end ) ; } for ( int k = 0 ; k < m proxy count ; k ++ ) { proxy proxy = m proxy buffer [ k ] ; proxy . index = new indices . get index ( proxy . index ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; contact . index a = new indices . get index ( contact . index a ) ; contact . index b = new indices . get index ( contact . index b ) ; } for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; contact . index = new indices . get index ( contact . index ) ; } for ( int k = 0 ; k < m pair count ; k ++ ) { pair pair = m pair buffer [ k ] ; pair . index a = new indices . get index ( pair . index a ) ; pair . index b = new indices . get index ( pair . index b ) ; } for ( int k = 0 ; k < m triad count ; k ++ ) { triad triad = m triad buffer [ k ] ; triad . index a = new indices . get index ( triad . index a ) ; triad . index b = new indices . get index ( triad . index b ) ; triad . index c = new indices . get index ( triad . index c ) ; } for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { group . m first index = new indices . get index ( group . m first index ) ; group . m last index = new indices . get index ( group . m last index - 1 ) + 1 ; } } <SENTENCE_END/>

(Copy Probability: 3.2%)

<SENTENCE_START> { if ( start == mid || mid == end ) { return ; } new indices . start = start ; new indices . mid = mid ; new indices . end = end ; buffer utils . rotate ( m flags buffer . data , start , mid , end ) ; buffer utils . rotate ( m position buffer . data , start , mid , end ) ; buffer utils . rotate ( m velocity buffer . data , start , mid , end ) ; buffer utils . rotate ( m group buffer , start , mid , end ) ; if ( m depth buffer != null ) { buffer utils . rotate ( m depth buffer , start , mid , end ) ; } if ( m color buffer . data != null ) { buffer utils . rotate ( m color buffer . data , start , mid , end ) ; } if ( m user data buffer . data != null ) { buffer utils . rotate ( m user data buffer . data , start , mid , end ) ; } for ( int k = 0 ; k < m proxy count ; k ++ ) { proxy proxy = m proxy buffer [ k ] ; proxy . index = new indices . get index ( proxy . index ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; contact . index a = new indices . get index ( contact . index a ) ; contact . index b = new indices . get index ( contact . index b ) ; } for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; contact . index = new indices . get index ( contact . index ) ; } for ( int k = 0 ; k < m pair count ; k ++ ) { pair pair = m pair buffer [ k ] ; pair . index a = new indices . get index ( pair . index a ) ; pair . index b = new indices . get index ( pair . index b ) ; } for ( int k = 0 ; k < m triad count ; k ++ ) { triad triad = m triad buffer [ k ] ; triad . index a = new indices . get index ( triad . index a ) ; triad . index b = new indices . get index ( triad . index b ) ; triad . index c = new indices . get index ( triad . index c ) ; } for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { group . m first index = new indices . get index ( group . m first index ) ; group . m last index = new indices . get index ( group . m last index - 1 ) + 1 ; } } <SENTENCE_END/>

buffer

<SENTENCE_START> { if ( start == mid || mid == end ) { return ; } new indices . start = start ; new indices . mid = mid ; new indices . end = end ; buffer utils . rotate ( m flags buffer . data , start , mid , end ) ; buffer utils . rotate ( m position buffer . data , start , mid , end ) ; buffer utils . rotate ( m velocity buffer . data , start , mid , end ) ; buffer utils . rotate ( m group buffer , start , mid , end ) ; if ( m depth buffer != null ) { buffer utils . rotate ( m depth buffer , start , mid , end ) ; } if ( m color buffer . data != null ) { buffer utils . rotate ( m color buffer . data , start , mid , end ) ; } if ( m user data buffer . data != null ) { buffer utils . rotate ( m user data buffer . data , start , mid , end ) ; } for ( int k = 0 ; k < m proxy count ; k ++ ) { proxy proxy = m proxy buffer [ k ] ; proxy . index = new indices . get index ( proxy . index ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; contact . index a = new indices . get index ( contact . index a ) ; contact . index b = new indices . get index ( contact . index b ) ; } for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; contact . index = new indices . get index ( contact . index ) ; } for ( int k = 0 ; k < m pair count ; k ++ ) { pair pair = m pair buffer [ k ] ; pair . index a = new indices . get index ( pair . index a ) ; pair . index b = new indices . get index ( pair . index b ) ; } for ( int k = 0 ; k < m triad count ; k ++ ) { triad triad = m triad buffer [ k ] ; triad . index a = new indices . get index ( triad . index a ) ; triad . index b = new indices . get index ( triad . index b ) ; triad . index c = new indices . get index ( triad . index c ) ; } for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { group . m first index = new indices . get index ( group . m first index ) ; group . m last index = new indices . get index ( group . m last index - 1 ) + 1 ; } } <SENTENCE_END/>

(Copy Probability: 10.4%)

<SENTENCE_START> { if ( start == mid || mid == end ) { return ; } new indices . start = start ; new indices . mid = mid ; new indices . end = end ; buffer utils . rotate ( m flags buffer . data , start , mid , end ) ; buffer utils . rotate ( m position buffer . data , start , mid , end ) ; buffer utils . rotate ( m velocity buffer . data , start , mid , end ) ; buffer utils . rotate ( m group buffer , start , mid , end ) ; if ( m depth buffer != null ) { buffer utils . rotate ( m depth buffer , start , mid , end ) ; } if ( m color buffer . data != null ) { buffer utils . rotate ( m color buffer . data , start , mid , end ) ; } if ( m user data buffer . data != null ) { buffer utils . rotate ( m user data buffer . data , start , mid , end ) ; } for ( int k = 0 ; k < m proxy count ; k ++ ) { proxy proxy = m proxy buffer [ k ] ; proxy . index = new indices . get index ( proxy . index ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; contact . index a = new indices . get index ( contact . index a ) ; contact . index b = new indices . get index ( contact . index b ) ; } for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; contact . index = new indices . get index ( contact . index ) ; } for ( int k = 0 ; k < m pair count ; k ++ ) { pair pair = m pair buffer [ k ] ; pair . index a = new indices . get index ( pair . index a ) ; pair . index b = new indices . get index ( pair . index b ) ; } for ( int k = 0 ; k < m triad count ; k ++ ) { triad triad = m triad buffer [ k ] ; triad . index a = new indices . get index ( triad . index a ) ; triad . index b = new indices . get index ( triad . index b ) ; triad . index c = new indices . get index ( triad . index c ) ; } for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { group . m first index = new indices . get index ( group . m first index ) ; group . m last index = new indices . get index ( group . m last index - 1 ) + 1 ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( start == mid || mid == end ) { return ; } new indices . start = start ; new indices . mid = mid ; new indices . end = end ; buffer utils . rotate ( m flags buffer . data , start , mid , end ) ; buffer utils . rotate ( m position buffer . data , start , mid , end ) ; buffer utils . rotate ( m velocity buffer . data , start , mid , end ) ; buffer utils . rotate ( m group buffer , start , mid , end ) ; if ( m depth buffer != null ) { buffer utils . rotate ( m depth buffer , start , mid , end ) ; } if ( m color buffer . data != null ) { buffer utils . rotate ( m color buffer . data , start , mid , end ) ; } if ( m user data buffer . data != null ) { buffer utils . rotate ( m user data buffer . data , start , mid , end ) ; } for ( int k = 0 ; k < m proxy count ; k ++ ) { proxy proxy = m proxy buffer [ k ] ; proxy . index = new indices . get index ( proxy . index ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; contact . index a = new indices . get index ( contact . index a ) ; contact . index b = new indices . get index ( contact . index b ) ; } for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; contact . index = new indices . get index ( contact . index ) ; } for ( int k = 0 ; k < m pair count ; k ++ ) { pair pair = m pair buffer [ k ] ; pair . index a = new indices . get index ( pair . index a ) ; pair . index b = new indices . get index ( pair . index b ) ; } for ( int k = 0 ; k < m triad count ; k ++ ) { triad triad = m triad buffer [ k ] ; triad . index a = new indices . get index ( triad . index a ) ; triad . index b = new indices . get index ( triad . index b ) ; triad . index c = new indices . get index ( triad . index c ) ; } for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { group . m first index = new indices . get index ( group . m first index ) ; group . m last index = new indices . get index ( group . m last index - 1 ) + 1 ; } } <SENTENCE_END/>

(Copy Probability: 13.3%)

<SENTENCE_START> { if ( start == mid || mid == end ) { return ; } new indices . start = start ; new indices . mid = mid ; new indices . end = end ; buffer utils . rotate ( m flags buffer . data , start , mid , end ) ; buffer utils . rotate ( m position buffer . data , start , mid , end ) ; buffer utils . rotate ( m velocity buffer . data , start , mid , end ) ; buffer utils . rotate ( m group buffer , start , mid , end ) ; if ( m depth buffer != null ) { buffer utils . rotate ( m depth buffer , start , mid , end ) ; } if ( m color buffer . data != null ) { buffer utils . rotate ( m color buffer . data , start , mid , end ) ; } if ( m user data buffer . data != null ) { buffer utils . rotate ( m user data buffer . data , start , mid , end ) ; } for ( int k = 0 ; k < m proxy count ; k ++ ) { proxy proxy = m proxy buffer [ k ] ; proxy . index = new indices . get index ( proxy . index ) ; } for ( int k = 0 ; k < m contact count ; k ++ ) { particle contact contact = m contact buffer [ k ] ; contact . index a = new indices . get index ( contact . index a ) ; contact . index b = new indices . get index ( contact . index b ) ; } for ( int k = 0 ; k < m body contact count ; k ++ ) { particle body contact contact = m body contact buffer [ k ] ; contact . index = new indices . get index ( contact . index ) ; } for ( int k = 0 ; k < m pair count ; k ++ ) { pair pair = m pair buffer [ k ] ; pair . index a = new indices . get index ( pair . index a ) ; pair . index b = new indices . get index ( pair . index b ) ; } for ( int k = 0 ; k < m triad count ; k ++ ) { triad triad = m triad buffer [ k ] ; triad . index a = new indices . get index ( triad . index a ) ; triad . index b = new indices . get index ( triad . index b ) ; triad . index c = new indices . get index ( triad . index c ) ; } for ( particle group group = m group list ; group != null ; group = group . get next ( ) ) { group . m first index = new indices . get index ( group . m first index ) ; group . m last index = new indices . get index ( group . m last index - 1 ) + 1 ; } } <SENTENCE_END/>
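
rotate,buffer applies the same [start, mid, end) rotation to every parallel per-particle array (flags, positions, velocities, groups, and the optional depth/color/user-data buffers) and then pushes every stored particle index through getIndex, so proxies, contacts, pairs, triads and groups keep referring to the same particles. Purely as an illustration of the rotation itself (the library's BufferUtils.rotate may be implemented differently), the standard three-reversal form:

// Illustrative sketch only -- not necessarily how BufferUtils.rotate is written.
static <T> void reverse(T[] a, int from, int to) {  // reverses a[from .. to)
  for (int i = from, j = to - 1; i < j; i++, j--) {
    T tmp = a[i]; a[i] = a[j]; a[j] = tmp;
  }
}

static <T> void rotate(T[] a, int start, int mid, int end) {
  reverse(a, start, mid);   // A B -> rev(A) B
  reverse(a, mid, end);     // rev(A) rev(B)
  reverse(a, start, end);   // B A : [mid, end) now precedes [start, mid)
}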


Original Name set,particle,radius

set

<SENTENCE_START> { m particle diameter = 2 * radius ; m squared diameter = m particle diameter * m particle diameter ; m inverse diameter = 1 / m particle diameter ; } <SENTENCE_END/>

(Copy Probability: 2.5%)

<SENTENCE_START> { m particle diameter = 2 * radius ; m squared diameter = m particle diameter * m particle diameter ; m inverse diameter = 1 / m particle diameter ; } <SENTENCE_END/>

particle

<SENTENCE_START> { m particle diameter = 2 * radius ; m squared diameter = m particle diameter * m particle diameter ; m inverse diameter = 1 / m particle diameter ; } <SENTENCE_END/>

(Copy Probability: 49.2%)

<SENTENCE_START> { m particle diameter = 2 * radius ; m squared diameter = m particle diameter * m particle diameter ; m inverse diameter = 1 / m particle diameter ; } <SENTENCE_END/>

radius

<SENTENCE_START> { m particle diameter = 2 * radius ; m squared diameter = m particle diameter * m particle diameter ; m inverse diameter = 1 / m particle diameter ; } <SENTENCE_END/>

(Copy Probability: 11.7%)

<SENTENCE_START> { m particle diameter = 2 * radius ; m squared diameter = m particle diameter * m particle diameter ; m inverse diameter = 1 / m particle diameter ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { m particle diameter = 2 * radius ; m squared diameter = m particle diameter * m particle diameter ; m inverse diameter = 1 / m particle diameter ; } <SENTENCE_END/>

(Copy Probability: 2.5%)

<SENTENCE_START> { m particle diameter = 2 * radius ; m squared diameter = m particle diameter * m particle diameter ; m inverse diameter = 1 / m particle diameter ; } <SENTENCE_END/>
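
set,particle,radius stores the radius as a diameter and caches its square and reciprocal, so per-contact code can compare squared distances and multiply by the inverse instead of dividing. De-tokenized, with field names assumed from the tokens:

void setParticleRadius(float radius) {
  m_particleDiameter = 2 * radius;
  m_squaredDiameter = m_particleDiameter * m_particleDiameter;
  m_inverseDiameter = 1 / m_particleDiameter;
}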


Original Name set,particle,density

set

<SENTENCE_START> { m density = density ; m inverse density = 1 / m density ; } <SENTENCE_END/>

(Copy Probability: 0.9%)

<SENTENCE_START> { m density = density ; m inverse density = 1 / m density ; } <SENTENCE_END/>

particle

<SENTENCE_START> { m density = density ; m inverse density = 1 / m density ; } <SENTENCE_END/>

(Copy Probability: 19.7%)

<SENTENCE_START> { m density = density ; m inverse density = 1 / m density ; } <SENTENCE_END/>

density

<SENTENCE_START> { m density = density ; m inverse density = 1 / m density ; } <SENTENCE_END/>

(Copy Probability: 3.4%)

<SENTENCE_START> { m density = density ; m inverse density = 1 / m density ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { m density = density ; m inverse density = 1 / m density ; } <SENTENCE_END/>

(Copy Probability: 0.7%)

<SENTENCE_START> { m density = density ; m inverse density = 1 / m density ; } <SENTENCE_END/>
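
set,particle,density follows the same caching pattern: the reciprocal is computed once at set time. De-tokenized, names assumed:

void setParticleDensity(float density) {
  m_density = density;
  m_inverseDensity = 1 / m_density;
}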


Original Name get,particle,density

get

<SENTENCE_START> { return m density ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { return m density ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m density ; } <SENTENCE_END/>

(Copy Probability: 86.5%)

<SENTENCE_START> { return m density ; } <SENTENCE_END/>

density

<SENTENCE_START> { return m density ; } <SENTENCE_END/>

(Copy Probability: 40.0%)

<SENTENCE_START> { return m density ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m density ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { return m density ; } <SENTENCE_END/>


Original Name set,particle,gravity,scale

set

<SENTENCE_START> { m gravity scale = gravity scale ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { m gravity scale = gravity scale ; } <SENTENCE_END/>

particle

<SENTENCE_START> { m gravity scale = gravity scale ; } <SENTENCE_END/>

(Copy Probability: 99.0%)

<SENTENCE_START> { m gravity scale = gravity scale ; } <SENTENCE_END/>

gravity

<SENTENCE_START> { m gravity scale = gravity scale ; } <SENTENCE_END/>

(Copy Probability: 85.0%)

<SENTENCE_START> { m gravity scale = gravity scale ; } <SENTENCE_END/>

scale

<SENTENCE_START> { m gravity scale = gravity scale ; } <SENTENCE_END/>

(Copy Probability: 1.7%)

<SENTENCE_START> { m gravity scale = gravity scale ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { m gravity scale = gravity scale ; } <SENTENCE_END/>

(Copy Probability: 4.5%)

<SENTENCE_START> { m gravity scale = gravity scale ; } <SENTENCE_END/>


Original Name get,particle,gravity,scale

get

<SENTENCE_START> { return m gravity scale ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { return m gravity scale ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m gravity scale ; } <SENTENCE_END/>

(Copy Probability: 98.6%)

<SENTENCE_START> { return m gravity scale ; } <SENTENCE_END/>

gravity

<SENTENCE_START> { return m gravity scale ; } <SENTENCE_END/>

(Copy Probability: 91.7%)

<SENTENCE_START> { return m gravity scale ; } <SENTENCE_END/>

scale

<SENTENCE_START> { return m gravity scale ; } <SENTENCE_END/>

(Copy Probability: 1.6%)

<SENTENCE_START> { return m gravity scale ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m gravity scale ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { return m gravity scale ; } <SENTENCE_END/>


Original Name set,particle,damping

set

<SENTENCE_START> { m damping strength = damping ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { m damping strength = damping ; } <SENTENCE_END/>

particle

<SENTENCE_START> { m damping strength = damping ; } <SENTENCE_END/>

(Copy Probability: 96.2%)

<SENTENCE_START> { m damping strength = damping ; } <SENTENCE_END/>

damping

<SENTENCE_START> { m damping strength = damping ; } <SENTENCE_END/>

(Copy Probability: 18.0%)

<SENTENCE_START> { m damping strength = damping ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { m damping strength = damping ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { m damping strength = damping ; } <SENTENCE_END/>


Original Name get,particle,damping

get

<SENTENCE_START> { return m damping strength ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { return m damping strength ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m damping strength ; } <SENTENCE_END/>

(Copy Probability: 83.2%)

<SENTENCE_START> { return m damping strength ; } <SENTENCE_END/>

damping

<SENTENCE_START> { return m damping strength ; } <SENTENCE_END/>

(Copy Probability: 67.4%)

<SENTENCE_START> { return m damping strength ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m damping strength ; } <SENTENCE_END/>

(Copy Probability: 2.1%)

<SENTENCE_START> { return m damping strength ; } <SENTENCE_END/>


Original Name get,particle,radius

get

<SENTENCE_START> { return m particle diameter / 2 ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { return m particle diameter / 2 ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m particle diameter / 2 ; } <SENTENCE_END/>

(Copy Probability: 81.6%)

<SENTENCE_START> { return m particle diameter / 2 ; } <SENTENCE_END/>

radius

<SENTENCE_START> { return m particle diameter / 2 ; } <SENTENCE_END/>

(Copy Probability: 80.9%)

<SENTENCE_START> { return m particle diameter / 2 ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m particle diameter / 2 ; } <SENTENCE_END/>

(Copy Probability: 6.5%)

<SENTENCE_START> { return m particle diameter / 2 ; } <SENTENCE_END/>


Original Name get,critical,velocity

get

<SENTENCE_START> { return m particle diameter * step . inv dt ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { return m particle diameter * step . inv dt ; } <SENTENCE_END/>

critical

<SENTENCE_START> { return m particle diameter * step . inv dt ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return m particle diameter * step . inv dt ; } <SENTENCE_END/>

velocity

<SENTENCE_START> { return m particle diameter * step . inv dt ; } <SENTENCE_END/>

(Copy Probability: 0.7%)

<SENTENCE_START> { return m particle diameter * step . inv dt ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m particle diameter * step . inv dt ; } <SENTENCE_END/>

(Copy Probability: 1.1%)

<SENTENCE_START> { return m particle diameter * step . inv dt ; } <SENTENCE_END/>


Original Name get,critical,velocity,squared

get

<SENTENCE_START> { float velocity = get critical velocity ( step ) ; return velocity * velocity ; } <SENTENCE_END/>

(Copy Probability: 0.9%)

<SENTENCE_START> { float velocity = get critical velocity ( step ) ; return velocity * velocity ; } <SENTENCE_END/>

critical

<SENTENCE_START> { float velocity = get critical velocity ( step ) ; return velocity * velocity ; } <SENTENCE_END/>

(Copy Probability: 49.1%)

<SENTENCE_START> { float velocity = get critical velocity ( step ) ; return velocity * velocity ; } <SENTENCE_END/>

velocity

<SENTENCE_START> { float velocity = get critical velocity ( step ) ; return velocity * velocity ; } <SENTENCE_END/>

(Copy Probability: 20.6%)

<SENTENCE_START> { float velocity = get critical velocity ( step ) ; return velocity * velocity ; } <SENTENCE_END/>

squared

<SENTENCE_START> { float velocity = get critical velocity ( step ) ; return velocity * velocity ; } <SENTENCE_END/>

(Copy Probability: 5.2%)

<SENTENCE_START> { float velocity = get critical velocity ( step ) ; return velocity * velocity ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { float velocity = get critical velocity ( step ) ; return velocity * velocity ; } <SENTENCE_END/>

(Copy Probability: 2.9%)

<SENTENCE_START> { float velocity = get critical velocity ( step ) ; return velocity * velocity ; } <SENTENCE_END/>


Original Name get,critical,pressure

get

<SENTENCE_START> { return m density * get critical velocity squared ( step ) ; } <SENTENCE_END/>

(Copy Probability: 0.9%)

<SENTENCE_START> { return m density * get critical velocity squared ( step ) ; } <SENTENCE_END/>

critical

<SENTENCE_START> { return m density * get critical velocity squared ( step ) ; } <SENTENCE_END/>

(Copy Probability: 51.9%)

<SENTENCE_START> { return m density * get critical velocity squared ( step ) ; } <SENTENCE_END/>

pressure

<SENTENCE_START> { return m density * get critical velocity squared ( step ) ; } <SENTENCE_END/>

(Copy Probability: 74.9%)

<SENTENCE_START> { return m density * get critical velocity squared ( step ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m density * get critical velocity squared ( step ) ; } <SENTENCE_END/>

(Copy Probability: 17.7%)

<SENTENCE_START> { return m density * get critical velocity squared ( step ) ; } <SENTENCE_END/>
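
The three get,critical,* bodies above form one small formula chain: a particle should not travel more than about one diameter per time step, which fixes a critical velocity, its square, and (scaled by density) a critical pressure. De-tokenized, with names assumed:

float getCriticalVelocity(TimeStep step) {
  return m_particleDiameter * step.inv_dt;              // one diameter per step
}

float getCriticalVelocitySquared(TimeStep step) {
  float v = getCriticalVelocity(step);
  return v * v;
}

float getCriticalPressure(TimeStep step) {
  return m_density * getCriticalVelocitySquared(step);  // ~ rho * v^2
}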


Original Name get,particle,stride

get

<SENTENCE_START> { return settings . particle stride * m particle diameter ; } <SENTENCE_END/>

(Copy Probability: 0.9%)

<SENTENCE_START> { return settings . particle stride * m particle diameter ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return settings . particle stride * m particle diameter ; } <SENTENCE_END/>

(Copy Probability: 51.5%)

<SENTENCE_START> { return settings . particle stride * m particle diameter ; } <SENTENCE_END/>

stride

<SENTENCE_START> { return settings . particle stride * m particle diameter ; } <SENTENCE_END/>

(Copy Probability: 27.8%)

<SENTENCE_START> { return settings . particle stride * m particle diameter ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return settings . particle stride * m particle diameter ; } <SENTENCE_END/>

(Copy Probability: 3.1%)

<SENTENCE_START> { return settings . particle stride * m particle diameter ; } <SENTENCE_END/>


Original Name get,particle,mass

get

<SENTENCE_START> { float stride = get particle stride ( ) ; return m density * stride * stride ; } <SENTENCE_END/>

(Copy Probability: 1.2%)

<SENTENCE_START> { float stride = get particle stride ( ) ; return m density * stride * stride ; } <SENTENCE_END/>

particle

<SENTENCE_START> { float stride = get particle stride ( ) ; return m density * stride * stride ; } <SENTENCE_END/>

(Copy Probability: 17.3%)

<SENTENCE_START> { float stride = get particle stride ( ) ; return m density * stride * stride ; } <SENTENCE_END/>

mass

<SENTENCE_START> { float stride = get particle stride ( ) ; return m density * stride * stride ; } <SENTENCE_END/>

(Copy Probability: 8.2%)

<SENTENCE_START> { float stride = get particle stride ( ) ; return m density * stride * stride ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { float stride = get particle stride ( ) ; return m density * stride * stride ; } <SENTENCE_END/>

(Copy Probability: 2.8%)

<SENTENCE_START> { float stride = get particle stride ( ) ; return m density * stride * stride ; } <SENTENCE_END/>


Original Name get,particle,inv,mass

get

<SENTENCE_START> { return 1.777777f * m inverse density * m inverse diameter * m inverse diameter ; } <SENTENCE_END/>

(Copy Probability: 1.2%)

<SENTENCE_START> { return 1.777777f * m inverse density * m inverse diameter * m inverse diameter ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return 1.777777f * m inverse density * m inverse diameter * m inverse diameter ; } <SENTENCE_END/>

(Copy Probability: 2.1%)

<SENTENCE_START> { return 1.777777f * m inverse density * m inverse diameter * m inverse diameter ; } <SENTENCE_END/>

inv

<SENTENCE_START> { return 1.777777f * m inverse density * m inverse diameter * m inverse diameter ; } <SENTENCE_END/>

(Copy Probability: 1.5%)

<SENTENCE_START> { return 1.777777f * m inverse density * m inverse diameter * m inverse diameter ; } <SENTENCE_END/>

mass

<SENTENCE_START> { return 1.777777f * m inverse density * m inverse diameter * m inverse diameter ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return 1.777777f * m inverse density * m inverse diameter * m inverse diameter ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return 1.777777f * m inverse density * m inverse diameter * m inverse diameter ; } <SENTENCE_END/>

(Copy Probability: 2.6%)

<SENTENCE_START> { return 1.777777f * m inverse density * m inverse diameter * m inverse diameter ; } <SENTENCE_END/>
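
get,particle,stride / mass / inv,mass fit together: mass is density times the area of one stride cell, with stride = Settings.particleStride * diameter. The literal 1.777777f in the inverse-mass body is presumably 1 / 0.75^2, i.e. the reciprocal of the default stride factor squared, so that the inverse mass equals 1 / getParticleMass() without the extra call. De-tokenized, names assumed:

float getParticleStride() {
  return Settings.particleStride * m_particleDiameter;
}

float getParticleMass() {
  float stride = getParticleStride();
  return m_density * stride * stride;                 // density * area of a stride cell
}

float getParticleInvMass() {
  // 1.777777f is presumably 1 / (0.75 * 0.75), assuming particleStride == 0.75
  return 1.777777f * m_inverseDensity * m_inverseDiameter * m_inverseDiameter;
}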


Original Name get,particle,flags,buffer

get

<SENTENCE_START> { return m flags buffer . data ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { return m flags buffer . data ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m flags buffer . data ; } <SENTENCE_END/>

(Copy Probability: 83.3%)

<SENTENCE_START> { return m flags buffer . data ; } <SENTENCE_END/>

flags

<SENTENCE_START> { return m flags buffer . data ; } <SENTENCE_END/>

(Copy Probability: 32.3%)

<SENTENCE_START> { return m flags buffer . data ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { return m flags buffer . data ; } <SENTENCE_END/>

(Copy Probability: 1.2%)

<SENTENCE_START> { return m flags buffer . data ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m flags buffer . data ; } <SENTENCE_END/>

(Copy Probability: 1.8%)

<SENTENCE_START> { return m flags buffer . data ; } <SENTENCE_END/>


Original Name get,particle,position,buffer

get

<SENTENCE_START> { return m position buffer . data ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { return m position buffer . data ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m position buffer . data ; } <SENTENCE_END/>

(Copy Probability: 23.7%)

<SENTENCE_START> { return m position buffer . data ; } <SENTENCE_END/>

position

<SENTENCE_START> { return m position buffer . data ; } <SENTENCE_END/>

(Copy Probability: 11.4%)

<SENTENCE_START> { return m position buffer . data ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { return m position buffer . data ; } <SENTENCE_END/>

(Copy Probability: 1.5%)

<SENTENCE_START> { return m position buffer . data ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m position buffer . data ; } <SENTENCE_END/>

(Copy Probability: 2.7%)

<SENTENCE_START> { return m position buffer . data ; } <SENTENCE_END/>


Original Name get,particle,velocity,buffer

get

<SENTENCE_START> { return m velocity buffer . data ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { return m velocity buffer . data ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m velocity buffer . data ; } <SENTENCE_END/>

(Copy Probability: 80.7%)

<SENTENCE_START> { return m velocity buffer . data ; } <SENTENCE_END/>

velocity

<SENTENCE_START> { return m velocity buffer . data ; } <SENTENCE_END/>

(Copy Probability: 44.9%)

<SENTENCE_START> { return m velocity buffer . data ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { return m velocity buffer . data ; } <SENTENCE_END/>

(Copy Probability: 2.6%)

<SENTENCE_START> { return m velocity buffer . data ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m velocity buffer . data ; } <SENTENCE_END/>

(Copy Probability: 1.3%)

<SENTENCE_START> { return m velocity buffer . data ; } <SENTENCE_END/>


Original Name get,particle,color,buffer

get

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; return m color buffer . data ; } <SENTENCE_END/>

(Copy Probability: 1.6%)

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; return m color buffer . data ; } <SENTENCE_END/>

particle

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; return m color buffer . data ; } <SENTENCE_END/>

(Copy Probability: 89.9%)

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; return m color buffer . data ; } <SENTENCE_END/>

color

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; return m color buffer . data ; } <SENTENCE_END/>

(Copy Probability: 95.1%)

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; return m color buffer . data ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; return m color buffer . data ; } <SENTENCE_END/>

(Copy Probability: 32.3%)

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; return m color buffer . data ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; return m color buffer . data ; } <SENTENCE_END/>

(Copy Probability: 5.7%)

<SENTENCE_START> { m color buffer . data = request particle buffer ( particle color . class , m color buffer . data ) ; return m color buffer . data ; } <SENTENCE_END/>


Original Name get,particle,user,data,buffer

get

<SENTENCE_START> { m user data buffer . data = request particle buffer ( object . class , m user data buffer . data ) ; return m user data buffer . data ; } <SENTENCE_END/>

(Copy Probability: 1.4%)

<SENTENCE_START> { m user data buffer . data = request particle buffer ( object . class , m user data buffer . data ) ; return m user data buffer . data ; } <SENTENCE_END/>

particle

<SENTENCE_START> { m user data buffer . data = request particle buffer ( object . class , m user data buffer . data ) ; return m user data buffer . data ; } <SENTENCE_END/>

(Copy Probability: 84.2%)

<SENTENCE_START> { m user data buffer . data = request particle buffer ( object . class , m user data buffer . data ) ; return m user data buffer . data ; } <SENTENCE_END/>

user

<SENTENCE_START> { m user data buffer . data = request particle buffer ( object . class , m user data buffer . data ) ; return m user data buffer . data ; } <SENTENCE_END/>

(Copy Probability: 96.6%)

<SENTENCE_START> { m user data buffer . data = request particle buffer ( object . class , m user data buffer . data ) ; return m user data buffer . data ; } <SENTENCE_END/>

data

<SENTENCE_START> { m user data buffer . data = request particle buffer ( object . class , m user data buffer . data ) ; return m user data buffer . data ; } <SENTENCE_END/>

(Copy Probability: 34.9%)

<SENTENCE_START> { m user data buffer . data = request particle buffer ( object . class , m user data buffer . data ) ; return m user data buffer . data ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { m user data buffer . data = request particle buffer ( object . class , m user data buffer . data ) ; return m user data buffer . data ; } <SENTENCE_END/>

(Copy Probability: 6.5%)

<SENTENCE_START> { m user data buffer . data = request particle buffer ( object . class , m user data buffer . data ) ; return m user data buffer . data ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { m user data buffer . data = request particle buffer ( object . class , m user data buffer . data ) ; return m user data buffer . data ; } <SENTENCE_END/>

(Copy Probability: 5.3%)

<SENTENCE_START> { m user data buffer . data = request particle buffer ( object . class , m user data buffer . data ) ; return m user data buffer . data ; } <SENTENCE_END/>
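
get,particle,color,buffer and get,particle,user,data,buffer both allocate their backing array lazily through requestParticleBuffer, so systems that never touch colors or user data never pay for them. A hedged sketch of such a helper (the allocation details and the m_internalAllocatedCapacity field are assumptions, not taken from the dump):

@SuppressWarnings("unchecked")
<T> T[] requestParticleBuffer(Class<T> klass, T[] buffer) {
  if (buffer == null) {
    // assumed: allocate at the system's current internal capacity and pre-fill
    // with fresh instances so callers can .set(...) elements in place
    buffer = (T[]) java.lang.reflect.Array.newInstance(klass, m_internalAllocatedCapacity);
    for (int i = 0; i < buffer.length; i++) {
      try {
        buffer[i] = klass.newInstance();
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    }
  }
  return buffer;
}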


Original Name get,particle,max,count

get

<SENTENCE_START> { return m max count ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { return m max count ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m max count ; } <SENTENCE_END/>

(Copy Probability: 94.7%)

<SENTENCE_START> { return m max count ; } <SENTENCE_END/>

max

<SENTENCE_START> { return m max count ; } <SENTENCE_END/>

(Copy Probability: 98.2%)

<SENTENCE_START> { return m max count ; } <SENTENCE_END/>

count

<SENTENCE_START> { return m max count ; } <SENTENCE_END/>

(Copy Probability: 11.7%)

<SENTENCE_START> { return m max count ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m max count ; } <SENTENCE_END/>

(Copy Probability: 4.3%)

<SENTENCE_START> { return m max count ; } <SENTENCE_END/>


Original Name set,particle,max,count

set

<SENTENCE_START> { assert ( m count <= count ) ; m max count = count ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { assert ( m count <= count ) ; m max count = count ; } <SENTENCE_END/>

particle

<SENTENCE_START> { assert ( m count <= count ) ; m max count = count ; } <SENTENCE_END/>

(Copy Probability: 23.7%)

<SENTENCE_START> { assert ( m count <= count ) ; m max count = count ; } <SENTENCE_END/>

max

<SENTENCE_START> { assert ( m count <= count ) ; m max count = count ; } <SENTENCE_END/>

(Copy Probability: 19.7%)

<SENTENCE_START> { assert ( m count <= count ) ; m max count = count ; } <SENTENCE_END/>

count

<SENTENCE_START> { assert ( m count <= count ) ; m max count = count ; } <SENTENCE_END/>

(Copy Probability: 1.6%)

<SENTENCE_START> { assert ( m count <= count ) ; m max count = count ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { assert ( m count <= count ) ; m max count = count ; } <SENTENCE_END/>

(Copy Probability: 2.0%)

<SENTENCE_START> { assert ( m count <= count ) ; m max count = count ; } <SENTENCE_END/>


Original Name set,particle,buffer

set

<SENTENCE_START> { assert ( ( new data != null && new capacity != 0 ) || ( new data == null && new capacity == 0 ) ) ; if ( buffer . user supplied capacity != 0 ) { } buffer . data = new data ; buffer . user supplied capacity = new capacity ; } <SENTENCE_END/>

(Copy Probability: 3.8%)

<SENTENCE_START> { assert ( ( new data != null && new capacity != 0 ) || ( new data == null && new capacity == 0 ) ) ; if ( buffer . user supplied capacity != 0 ) { } buffer . data = new data ; buffer . user supplied capacity = new capacity ; } <SENTENCE_END/>

particle

<SENTENCE_START> { assert ( ( new data != null && new capacity != 0 ) || ( new data == null && new capacity == 0 ) ) ; if ( buffer . user supplied capacity != 0 ) { } buffer . data = new data ; buffer . user supplied capacity = new capacity ; } <SENTENCE_END/>

(Copy Probability: 54.5%)

<SENTENCE_START> { assert ( ( new data != null && new capacity != 0 ) || ( new data == null && new capacity == 0 ) ) ; if ( buffer . user supplied capacity != 0 ) { } buffer . data = new data ; buffer . user supplied capacity = new capacity ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { assert ( ( new data != null && new capacity != 0 ) || ( new data == null && new capacity == 0 ) ) ; if ( buffer . user supplied capacity != 0 ) { } buffer . data = new data ; buffer . user supplied capacity = new capacity ; } <SENTENCE_END/>

(Copy Probability: 28.2%)

<SENTENCE_START> { assert ( ( new data != null && new capacity != 0 ) || ( new data == null && new capacity == 0 ) ) ; if ( buffer . user supplied capacity != 0 ) { } buffer . data = new data ; buffer . user supplied capacity = new capacity ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { assert ( ( new data != null && new capacity != 0 ) || ( new data == null && new capacity == 0 ) ) ; if ( buffer . user supplied capacity != 0 ) { } buffer . data = new data ; buffer . user supplied capacity = new capacity ; } <SENTENCE_END/>

(Copy Probability: 4.8%)

<SENTENCE_START> { assert ( ( new data != null && new capacity != 0 ) || ( new data == null && new capacity == 0 ) ) ; if ( buffer . user supplied capacity != 0 ) { } buffer . data = new data ; buffer . user supplied capacity = new capacity ; } <SENTENCE_END/>


Original Name set,particle,buffer

set

<SENTENCE_START> { assert ( ( new data != null && new capacity != 0 ) || ( new data == null && new capacity == 0 ) ) ; if ( buffer . user supplied capacity != 0 ) { } buffer . data = new data ; buffer . user supplied capacity = new capacity ; } <SENTENCE_END/>

(Copy Probability: 3.8%)

<SENTENCE_START> { assert ( ( new data != null && new capacity != 0 ) || ( new data == null && new capacity == 0 ) ) ; if ( buffer . user supplied capacity != 0 ) { } buffer . data = new data ; buffer . user supplied capacity = new capacity ; } <SENTENCE_END/>

particle

<SENTENCE_START> { assert ( ( new data != null && new capacity != 0 ) || ( new data == null && new capacity == 0 ) ) ; if ( buffer . user supplied capacity != 0 ) { } buffer . data = new data ; buffer . user supplied capacity = new capacity ; } <SENTENCE_END/>

(Copy Probability: 54.5%)

<SENTENCE_START> { assert ( ( new data != null && new capacity != 0 ) || ( new data == null && new capacity == 0 ) ) ; if ( buffer . user supplied capacity != 0 ) { } buffer . data = new data ; buffer . user supplied capacity = new capacity ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { assert ( ( new data != null && new capacity != 0 ) || ( new data == null && new capacity == 0 ) ) ; if ( buffer . user supplied capacity != 0 ) { } buffer . data = new data ; buffer . user supplied capacity = new capacity ; } <SENTENCE_END/>

(Copy Probability: 28.2%)

<SENTENCE_START> { assert ( ( new data != null && new capacity != 0 ) || ( new data == null && new capacity == 0 ) ) ; if ( buffer . user supplied capacity != 0 ) { } buffer . data = new data ; buffer . user supplied capacity = new capacity ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { assert ( ( new data != null && new capacity != 0 ) || ( new data == null && new capacity == 0 ) ) ; if ( buffer . user supplied capacity != 0 ) { } buffer . data = new data ; buffer . user supplied capacity = new capacity ; } <SENTENCE_END/>

(Copy Probability: 4.8%)

<SENTENCE_START> { assert ( ( new data != null && new capacity != 0 ) || ( new data == null && new capacity == 0 ) ) ; if ( buffer . user supplied capacity != 0 ) { } buffer . data = new data ; buffer . user supplied capacity = new capacity ; } <SENTENCE_END/>
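
The two identical set,particle,buffer entries above are presumably overloads of the same internal setter (one per buffer element type). It swaps in a caller-owned array and records its capacity, so the system neither grows nor frees memory it does not own. De-tokenized, with a ParticleBuffer<T> holder type assumed:

<T> void setParticleBuffer(ParticleBuffer<T> buffer, T[] newData, int newCapacity) {
  assert (newData != null && newCapacity != 0) || (newData == null && newCapacity == 0);
  if (buffer.userSuppliedCapacity != 0) {
    // the empty if-block in the dump suggests the previous user-supplied buffer
    // would be released here in the original C++; nothing to free on the JVM
  }
  buffer.data = newData;
  buffer.userSuppliedCapacity = newCapacity;
}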


Original Name set,particle,flags,buffer

set

<SENTENCE_START> { set particle buffer ( m flags buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { set particle buffer ( m flags buffer , buffer , capacity ) ; } <SENTENCE_END/>

particle

<SENTENCE_START> { set particle buffer ( m flags buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 86.7%)

<SENTENCE_START> { set particle buffer ( m flags buffer , buffer , capacity ) ; } <SENTENCE_END/>

flags

<SENTENCE_START> { set particle buffer ( m flags buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 92.5%)

<SENTENCE_START> { set particle buffer ( m flags buffer , buffer , capacity ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { set particle buffer ( m flags buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 21.7%)

<SENTENCE_START> { set particle buffer ( m flags buffer , buffer , capacity ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { set particle buffer ( m flags buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { set particle buffer ( m flags buffer , buffer , capacity ) ; } <SENTENCE_END/>


Original Name set,particle,position,buffer

set

<SENTENCE_START> { set particle buffer ( m position buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { set particle buffer ( m position buffer , buffer , capacity ) ; } <SENTENCE_END/>

particle

<SENTENCE_START> { set particle buffer ( m position buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 92.2%)

<SENTENCE_START> { set particle buffer ( m position buffer , buffer , capacity ) ; } <SENTENCE_END/>

position

<SENTENCE_START> { set particle buffer ( m position buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 86.9%)

<SENTENCE_START> { set particle buffer ( m position buffer , buffer , capacity ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { set particle buffer ( m position buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 11.5%)

<SENTENCE_START> { set particle buffer ( m position buffer , buffer , capacity ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { set particle buffer ( m position buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 3.0%)

<SENTENCE_START> { set particle buffer ( m position buffer , buffer , capacity ) ; } <SENTENCE_END/>


Original Name set,particle,velocity,buffer

set

<SENTENCE_START> { set particle buffer ( m velocity buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.9%)

<SENTENCE_START> { set particle buffer ( m velocity buffer , buffer , capacity ) ; } <SENTENCE_END/>

particle

<SENTENCE_START> { set particle buffer ( m velocity buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 92.0%)

<SENTENCE_START> { set particle buffer ( m velocity buffer , buffer , capacity ) ; } <SENTENCE_END/>

velocity

<SENTENCE_START> { set particle buffer ( m velocity buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 94.6%)

<SENTENCE_START> { set particle buffer ( m velocity buffer , buffer , capacity ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { set particle buffer ( m velocity buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 23.1%)

<SENTENCE_START> { set particle buffer ( m velocity buffer , buffer , capacity ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { set particle buffer ( m velocity buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 4.3%)

<SENTENCE_START> { set particle buffer ( m velocity buffer , buffer , capacity ) ; } <SENTENCE_END/>


Original Name set,particle,color,buffer

set

<SENTENCE_START> { set particle buffer ( m color buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { set particle buffer ( m color buffer , buffer , capacity ) ; } <SENTENCE_END/>

particle

<SENTENCE_START> { set particle buffer ( m color buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 88.4%)

<SENTENCE_START> { set particle buffer ( m color buffer , buffer , capacity ) ; } <SENTENCE_END/>

color

<SENTENCE_START> { set particle buffer ( m color buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 94.0%)

<SENTENCE_START> { set particle buffer ( m color buffer , buffer , capacity ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { set particle buffer ( m color buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 26.8%)

<SENTENCE_START> { set particle buffer ( m color buffer , buffer , capacity ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { set particle buffer ( m color buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { set particle buffer ( m color buffer , buffer , capacity ) ; } <SENTENCE_END/>
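
The set,particle,{flags,position,velocity,color},buffer entries are thin wrappers that forward a caller-supplied array plus its capacity to setParticleBuffer. A hedged usage sketch (system, maxParticles and the Vec2 element type are assumptions for illustration):

// Let the particle system write positions straight into a caller-owned array,
// e.g. one that is also handed to a renderer.
Vec2[] positions = new Vec2[maxParticles];
for (int i = 0; i < positions.length; i++) {
  positions[i] = new Vec2();
}
system.setParticlePositionBuffer(positions, positions.length);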


Original Name get,particle,group,buffer

get

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

(Copy Probability: 96.7%)

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

group

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

(Copy Probability: 98.2%)

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

(Copy Probability: 11.8%)

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

(Copy Probability: 4.0%)

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>


Original Name get,particle,group,count

get

<SENTENCE_START> { return m group count ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { return m group count ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m group count ; } <SENTENCE_END/>

(Copy Probability: 93.3%)

<SENTENCE_START> { return m group count ; } <SENTENCE_END/>

group

<SENTENCE_START> { return m group count ; } <SENTENCE_END/>

(Copy Probability: 97.6%)

<SENTENCE_START> { return m group count ; } <SENTENCE_END/>

count

<SENTENCE_START> { return m group count ; } <SENTENCE_END/>

(Copy Probability: 11.8%)

<SENTENCE_START> { return m group count ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m group count ; } <SENTENCE_END/>

(Copy Probability: 4.3%)

<SENTENCE_START> { return m group count ; } <SENTENCE_END/>


Original Name get,particle,group,list

get

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

(Copy Probability: 96.7%)

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

group

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

(Copy Probability: 98.2%)

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

list

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

(Copy Probability: 11.8%)

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>

(Copy Probability: 4.0%)

<SENTENCE_START> { return m group buffer ; } <SENTENCE_END/>


Original Name get,particle,count

get

<SENTENCE_START> { return m count ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { return m count ; } <SENTENCE_END/>

particle

<SENTENCE_START> { return m count ; } <SENTENCE_END/>

(Copy Probability: 37.6%)

<SENTENCE_START> { return m count ; } <SENTENCE_END/>

count

<SENTENCE_START> { return m count ; } <SENTENCE_END/>

(Copy Probability: 29.7%)

<SENTENCE_START> { return m count ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return m count ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return m count ; } <SENTENCE_END/>


Original Name set,particle,user,data,buffer

set

<SENTENCE_START> { set particle buffer ( m user data buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { set particle buffer ( m user data buffer , buffer , capacity ) ; } <SENTENCE_END/>

particle

<SENTENCE_START> { set particle buffer ( m user data buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 97.6%)

<SENTENCE_START> { set particle buffer ( m user data buffer , buffer , capacity ) ; } <SENTENCE_END/>

user

<SENTENCE_START> { set particle buffer ( m user data buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 98.8%)

<SENTENCE_START> { set particle buffer ( m user data buffer , buffer , capacity ) ; } <SENTENCE_END/>

data

<SENTENCE_START> { set particle buffer ( m user data buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 37.7%)

<SENTENCE_START> { set particle buffer ( m user data buffer , buffer , capacity ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { set particle buffer ( m user data buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 6.2%)

<SENTENCE_START> { set particle buffer ( m user data buffer , buffer , capacity ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { set particle buffer ( m user data buffer , buffer , capacity ) ; } <SENTENCE_END/>

(Copy Probability: 4.8%)

<SENTENCE_START> { set particle buffer ( m user data buffer , buffer , capacity ) ; } <SENTENCE_END/>
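
The set,particle,*,buffer bodies above (position, velocity, color, user data) all forward to a single internal helper. A minimal sketch of that pattern, reconstructed from the tokenized bodies; class, type, and field names such as ParticleSystem, Vec2, ParticleColor, and m_positionBuffer are assumptions based on jbox2d/LiquidFun conventions, not confirmed by this dump.

    // Sketch only: each public setter hands a user-supplied array plus its
    // capacity to the matching internal ParticleBuffer via one shared helper.
    public void setParticlePositionBuffer(Vec2[] buffer, int capacity) {
      setParticleBuffer(m_positionBuffer, buffer, capacity);
    }

    public void setParticleVelocityBuffer(Vec2[] buffer, int capacity) {
      setParticleBuffer(m_velocityBuffer, buffer, capacity);
    }

    public void setParticleColorBuffer(ParticleColor[] buffer, int capacity) {
      setParticleBuffer(m_colorBuffer, buffer, capacity);
    }

    public void setParticleUserDataBuffer(Object[] buffer, int capacity) {
      setParticleBuffer(m_userDataBuffer, buffer, capacity);
    }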


Original Name lower,bound

lower

<SENTENCE_START> { int left = 0 ; int step , curr ; while ( length > 0 ) { step = length / 2 ; curr = left + step ; if ( ray [ curr ] . tag < tag ) { left = curr + 1 ; length -= step + 1 ; } else { length = step ; } } return left ; } <SENTENCE_END/>

(Copy Probability: 2.5%)

<SENTENCE_START> { int left = 0 ; int step , curr ; while ( length > 0 ) { step = length / 2 ; curr = left + step ; if ( ray [ curr ] . tag < tag ) { left = curr + 1 ; length -= step + 1 ; } else { length = step ; } } return left ; } <SENTENCE_END/>

bound

<SENTENCE_START> { int left = 0 ; int step , curr ; while ( length > 0 ) { step = length / 2 ; curr = left + step ; if ( ray [ curr ] . tag < tag ) { left = curr + 1 ; length -= step + 1 ; } else { length = step ; } } return left ; } <SENTENCE_END/>

(Copy Probability: 2.8%)

<SENTENCE_START> { int left = 0 ; int step , curr ; while ( length > 0 ) { step = length / 2 ; curr = left + step ; if ( ray [ curr ] . tag < tag ) { left = curr + 1 ; length -= step + 1 ; } else { length = step ; } } return left ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int left = 0 ; int step , curr ; while ( length > 0 ) { step = length / 2 ; curr = left + step ; if ( ray [ curr ] . tag < tag ) { left = curr + 1 ; length -= step + 1 ; } else { length = step ; } } return left ; } <SENTENCE_END/>

(Copy Probability: 1.2%)

<SENTENCE_START> { int left = 0 ; int step , curr ; while ( length > 0 ) { step = length / 2 ; curr = left + step ; if ( ray [ curr ] . tag < tag ) { left = curr + 1 ; length -= step + 1 ; } else { length = step ; } } return left ; } <SENTENCE_END/>


Original Name upper,bound

upper

<SENTENCE_START> { int left = 0 ; int step , curr ; while ( length > 0 ) { step = length / 2 ; curr = left + step ; if ( ray [ curr ] . tag <= tag ) { left = curr + 1 ; length -= step + 1 ; } else { length = step ; } } return left ; } <SENTENCE_END/>

(Copy Probability: 2.4%)

<SENTENCE_START> { int left = 0 ; int step , curr ; while ( length > 0 ) { step = length / 2 ; curr = left + step ; if ( ray [ curr ] . tag <= tag ) { left = curr + 1 ; length -= step + 1 ; } else { length = step ; } } return left ; } <SENTENCE_END/>

bound

<SENTENCE_START> { int left = 0 ; int step , curr ; while ( length > 0 ) { step = length / 2 ; curr = left + step ; if ( ray [ curr ] . tag <= tag ) { left = curr + 1 ; length -= step + 1 ; } else { length = step ; } } return left ; } <SENTENCE_END/>

(Copy Probability: 2.8%)

<SENTENCE_START> { int left = 0 ; int step , curr ; while ( length > 0 ) { step = length / 2 ; curr = left + step ; if ( ray [ curr ] . tag <= tag ) { left = curr + 1 ; length -= step + 1 ; } else { length = step ; } } return left ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int left = 0 ; int step , curr ; while ( length > 0 ) { step = length / 2 ; curr = left + step ; if ( ray [ curr ] . tag <= tag ) { left = curr + 1 ; length -= step + 1 ; } else { length = step ; } } return left ; } <SENTENCE_END/>

(Copy Probability: 1.2%)

<SENTENCE_START> { int left = 0 ; int step , curr ; while ( length > 0 ) { step = length / 2 ; curr = left + step ; if ( ray [ curr ] . tag <= tag ) { left = curr + 1 ; length -= step + 1 ; } else { length = step ; } } return left ; } <SENTENCE_END/>
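
The lower,bound and upper,bound bodies above are the same binary search over the sorted proxy array; only the comparison differs (< versus <=). A self-contained, readable sketch of both, with a stand-in Proxy record whose tag type is an assumption:

    final class ProxySearch {
      // Stand-in for the particle system's proxy record; only the tag matters here.
      static final class Proxy {
        long tag;
      }

      // First index whose tag is >= the query tag (classic lower_bound).
      static int lowerBound(Proxy[] ray, int length, long tag) {
        int left = 0;
        while (length > 0) {
          int step = length / 2;
          int curr = left + step;
          if (ray[curr].tag < tag) {   // everything up to curr is too small
            left = curr + 1;
            length -= step + 1;
          } else {
            length = step;             // answer lies in the left half
          }
        }
        return left;
      }

      // First index whose tag is > the query tag; only the comparison changes.
      static int upperBound(Proxy[] ray, int length, long tag) {
        int left = 0;
        while (length > 0) {
          int step = length / 2;
          int curr = left + step;
          if (ray[curr].tag <= tag) {
            left = curr + 1;
            length -= step + 1;
          } else {
            length = step;
          }
        }
        return left;
      }
    }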


Original Name query,aabb

query

<SENTENCE_START> { if ( m proxy count == 0 ) { return ; } final float lower bound x = aabb . lower bound . x ; final float lower bound y = aabb . lower bound . y ; final float upper bound x = aabb . upper bound . x ; final float upper bound y = aabb . upper bound . y ; int first proxy = lower bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * lower bound x , m inverse diameter * lower bound y ) ) ; int last proxy = upper bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * upper bound x , m inverse diameter * upper bound y ) ) ; for ( int proxy = first proxy ; proxy < last proxy ; ++ proxy ) { int i = m proxy buffer [ proxy ] . index ; final vec 2 p = m position buffer . data [ i ] ; if ( lower bound x < p . x && p . x < upper bound x && lower bound y < p . y && p . y < upper bound y ) { if ( ! callback . report particle ( i ) ) { break ; } } } } <SENTENCE_END/>

(Copy Probability: 3.3%)

<SENTENCE_START> { if ( m proxy count == 0 ) { return ; } final float lower bound x = aabb . lower bound . x ; final float lower bound y = aabb . lower bound . y ; final float upper bound x = aabb . upper bound . x ; final float upper bound y = aabb . upper bound . y ; int first proxy = lower bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * lower bound x , m inverse diameter * lower bound y ) ) ; int last proxy = upper bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * upper bound x , m inverse diameter * upper bound y ) ) ; for ( int proxy = first proxy ; proxy < last proxy ; ++ proxy ) { int i = m proxy buffer [ proxy ] . index ; final vec 2 p = m position buffer . data [ i ] ; if ( lower bound x < p . x && p . x < upper bound x && lower bound y < p . y && p . y < upper bound y ) { if ( ! callback . report particle ( i ) ) { break ; } } } } <SENTENCE_END/>

aabb

<SENTENCE_START> { if ( m proxy count == 0 ) { return ; } final float lower bound x = aabb . lower bound . x ; final float lower bound y = aabb . lower bound . y ; final float upper bound x = aabb . upper bound . x ; final float upper bound y = aabb . upper bound . y ; int first proxy = lower bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * lower bound x , m inverse diameter * lower bound y ) ) ; int last proxy = upper bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * upper bound x , m inverse diameter * upper bound y ) ) ; for ( int proxy = first proxy ; proxy < last proxy ; ++ proxy ) { int i = m proxy buffer [ proxy ] . index ; final vec 2 p = m position buffer . data [ i ] ; if ( lower bound x < p . x && p . x < upper bound x && lower bound y < p . y && p . y < upper bound y ) { if ( ! callback . report particle ( i ) ) { break ; } } } } <SENTENCE_END/>

(Copy Probability: 18.1%)

<SENTENCE_START> { if ( m proxy count == 0 ) { return ; } final float lower bound x = aabb . lower bound . x ; final float lower bound y = aabb . lower bound . y ; final float upper bound x = aabb . upper bound . x ; final float upper bound y = aabb . upper bound . y ; int first proxy = lower bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * lower bound x , m inverse diameter * lower bound y ) ) ; int last proxy = upper bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * upper bound x , m inverse diameter * upper bound y ) ) ; for ( int proxy = first proxy ; proxy < last proxy ; ++ proxy ) { int i = m proxy buffer [ proxy ] . index ; final vec 2 p = m position buffer . data [ i ] ; if ( lower bound x < p . x && p . x < upper bound x && lower bound y < p . y && p . y < upper bound y ) { if ( ! callback . report particle ( i ) ) { break ; } } } } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( m proxy count == 0 ) { return ; } final float lower bound x = aabb . lower bound . x ; final float lower bound y = aabb . lower bound . y ; final float upper bound x = aabb . upper bound . x ; final float upper bound y = aabb . upper bound . y ; int first proxy = lower bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * lower bound x , m inverse diameter * lower bound y ) ) ; int last proxy = upper bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * upper bound x , m inverse diameter * upper bound y ) ) ; for ( int proxy = first proxy ; proxy < last proxy ; ++ proxy ) { int i = m proxy buffer [ proxy ] . index ; final vec 2 p = m position buffer . data [ i ] ; if ( lower bound x < p . x && p . x < upper bound x && lower bound y < p . y && p . y < upper bound y ) { if ( ! callback . report particle ( i ) ) { break ; } } } } <SENTENCE_END/>

(Copy Probability: 8.7%)

<SENTENCE_START> { if ( m proxy count == 0 ) { return ; } final float lower bound x = aabb . lower bound . x ; final float lower bound y = aabb . lower bound . y ; final float upper bound x = aabb . upper bound . x ; final float upper bound y = aabb . upper bound . y ; int first proxy = lower bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * lower bound x , m inverse diameter * lower bound y ) ) ; int last proxy = upper bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * upper bound x , m inverse diameter * upper bound y ) ) ; for ( int proxy = first proxy ; proxy < last proxy ; ++ proxy ) { int i = m proxy buffer [ proxy ] . index ; final vec 2 p = m position buffer . data [ i ] ; if ( lower bound x < p . x && p . x < upper bound x && lower bound y < p . y && p . y < upper bound y ) { if ( ! callback . report particle ( i ) ) { break ; } } } } <SENTENCE_END/>
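
The query,aabb body above first narrows the sorted proxy array to the tag range spanned by the query box, then does an exact point-in-box test per candidate. A readable reconstruction follows; field and helper names (m_proxyBuffer, computeTag, m_inverseDiameter, ParticleQueryCallback, and so on) are assumed from jbox2d/LiquidFun conventions rather than confirmed by this dump.

    // Sketch: spatial query over particles using the sorted proxy tags.
    public void queryAABB(ParticleQueryCallback callback, AABB aabb) {
      if (m_proxyCount == 0) {
        return;
      }
      // Restrict the scan to proxies whose tags fall inside the box's tag range.
      int firstProxy = lowerBound(m_proxyBuffer, m_proxyCount,
          computeTag(m_inverseDiameter * aabb.lowerBound.x,
                     m_inverseDiameter * aabb.lowerBound.y));
      int lastProxy = upperBound(m_proxyBuffer, m_proxyCount,
          computeTag(m_inverseDiameter * aabb.upperBound.x,
                     m_inverseDiameter * aabb.upperBound.y));
      // Exact containment test for each surviving candidate.
      for (int proxy = firstProxy; proxy < lastProxy; ++proxy) {
        int i = m_proxyBuffer[proxy].index;
        Vec2 p = m_positionBuffer.data[i];
        if (aabb.lowerBound.x < p.x && p.x < aabb.upperBound.x
            && aabb.lowerBound.y < p.y && p.y < aabb.upperBound.y) {
          if (!callback.reportParticle(i)) {
            break;                      // callback asked to stop early
          }
        }
      }
    }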


Original Name raycast

raycast

<SENTENCE_START> { if ( m proxy count == 0 ) { return ; } int first proxy = lower bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * math utils . min ( point 1 . x , point 2 . x ) - 1 , m inverse diameter * math utils . min ( point 1 . y , point 2 . y ) - 1 ) ) ; int last proxy = upper bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * math utils . max ( point 1 . x , point 2 . x ) + 1 , m inverse diameter * math utils . max ( point 1 . y , point 2 . y ) + 1 ) ) ; float fraction = 1 ; final float vx = point 2 . x - point 1 . x ; final float vy = point 2 . y - point 1 . y ; float v 2 = vx * vx + vy * vy ; if ( v 2 == 0 ) v 2 = float . max value ; for ( int proxy = first proxy ; proxy < last proxy ; ++ proxy ) { int i = m proxy buffer [ proxy ] . index ; final vec 2 pos i = m position buffer . data [ i ] ; final float px = point 1 . x - pos i . x ; final float py = point 1 . y - pos i . y ; float pv = px * vx + py * vy ; float p 2 = px * px + py * py ; float determinant = pv * pv - v 2 * ( p 2 - m squared diameter ) ; if ( determinant >= 0 ) { float sqrt determinant = math utils . sqrt ( determinant ) ; float t = ( - pv - sqrt determinant ) / v 2 ; if ( t > fraction ) { continue ; } if ( t < 0 ) { t = ( - pv + sqrt determinant ) / v 2 ; if ( t < 0 || t > fraction ) { continue ; } } final vec 2 n = temp vec ; temp vec . x = px + t * vx ; temp vec . y = py + t * vy ; n . normalize ( ) ; final vec 2 point = temp vec 2 ; point . x = point 1 . x + t * vx ; point . y = point 1 . y + t * vy ; float f = callback . report particle ( i , point , n , t ) ; fraction = math utils . min ( fraction , f ) ; if ( fraction <= 0 ) { break ; } } } } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { if ( m proxy count == 0 ) { return ; } int first proxy = lower bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * math utils . min ( point 1 . x , point 2 . x ) - 1 , m inverse diameter * math utils . min ( point 1 . y , point 2 . y ) - 1 ) ) ; int last proxy = upper bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * math utils . max ( point 1 . x , point 2 . x ) + 1 , m inverse diameter * math utils . max ( point 1 . y , point 2 . y ) + 1 ) ) ; float fraction = 1 ; final float vx = point 2 . x - point 1 . x ; final float vy = point 2 . y - point 1 . y ; float v 2 = vx * vx + vy * vy ; if ( v 2 == 0 ) v 2 = float . max value ; for ( int proxy = first proxy ; proxy < last proxy ; ++ proxy ) { int i = m proxy buffer [ proxy ] . index ; final vec 2 pos i = m position buffer . data [ i ] ; final float px = point 1 . x - pos i . x ; final float py = point 1 . y - pos i . y ; float pv = px * vx + py * vy ; float p 2 = px * px + py * py ; float determinant = pv * pv - v 2 * ( p 2 - m squared diameter ) ; if ( determinant >= 0 ) { float sqrt determinant = math utils . sqrt ( determinant ) ; float t = ( - pv - sqrt determinant ) / v 2 ; if ( t > fraction ) { continue ; } if ( t < 0 ) { t = ( - pv + sqrt determinant ) / v 2 ; if ( t < 0 || t > fraction ) { continue ; } } final vec 2 n = temp vec ; temp vec . x = px + t * vx ; temp vec . y = py + t * vy ; n . normalize ( ) ; final vec 2 point = temp vec 2 ; point . x = point 1 . x + t * vx ; point . y = point 1 . y + t * vy ; float f = callback . report particle ( i , point , n , t ) ; fraction = math utils . min ( fraction , f ) ; if ( fraction <= 0 ) { break ; } } } } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( m proxy count == 0 ) { return ; } int first proxy = lower bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * math utils . min ( point 1 . x , point 2 . x ) - 1 , m inverse diameter * math utils . min ( point 1 . y , point 2 . y ) - 1 ) ) ; int last proxy = upper bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * math utils . max ( point 1 . x , point 2 . x ) + 1 , m inverse diameter * math utils . max ( point 1 . y , point 2 . y ) + 1 ) ) ; float fraction = 1 ; final float vx = point 2 . x - point 1 . x ; final float vy = point 2 . y - point 1 . y ; float v 2 = vx * vx + vy * vy ; if ( v 2 == 0 ) v 2 = float . max value ; for ( int proxy = first proxy ; proxy < last proxy ; ++ proxy ) { int i = m proxy buffer [ proxy ] . index ; final vec 2 pos i = m position buffer . data [ i ] ; final float px = point 1 . x - pos i . x ; final float py = point 1 . y - pos i . y ; float pv = px * vx + py * vy ; float p 2 = px * px + py * py ; float determinant = pv * pv - v 2 * ( p 2 - m squared diameter ) ; if ( determinant >= 0 ) { float sqrt determinant = math utils . sqrt ( determinant ) ; float t = ( - pv - sqrt determinant ) / v 2 ; if ( t > fraction ) { continue ; } if ( t < 0 ) { t = ( - pv + sqrt determinant ) / v 2 ; if ( t < 0 || t > fraction ) { continue ; } } final vec 2 n = temp vec ; temp vec . x = px + t * vx ; temp vec . y = py + t * vy ; n . normalize ( ) ; final vec 2 point = temp vec 2 ; point . x = point 1 . x + t * vx ; point . y = point 1 . y + t * vy ; float f = callback . report particle ( i , point , n , t ) ; fraction = math utils . min ( fraction , f ) ; if ( fraction <= 0 ) { break ; } } } } <SENTENCE_END/>

(Copy Probability: 12.8%)

<SENTENCE_START> { if ( m proxy count == 0 ) { return ; } int first proxy = lower bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * math utils . min ( point 1 . x , point 2 . x ) - 1 , m inverse diameter * math utils . min ( point 1 . y , point 2 . y ) - 1 ) ) ; int last proxy = upper bound ( m proxy buffer , m proxy count , compute tag ( m inverse diameter * math utils . max ( point 1 . x , point 2 . x ) + 1 , m inverse diameter * math utils . max ( point 1 . y , point 2 . y ) + 1 ) ) ; float fraction = 1 ; final float vx = point 2 . x - point 1 . x ; final float vy = point 2 . y - point 1 . y ; float v 2 = vx * vx + vy * vy ; if ( v 2 == 0 ) v 2 = float . max value ; for ( int proxy = first proxy ; proxy < last proxy ; ++ proxy ) { int i = m proxy buffer [ proxy ] . index ; final vec 2 pos i = m position buffer . data [ i ] ; final float px = point 1 . x - pos i . x ; final float py = point 1 . y - pos i . y ; float pv = px * vx + py * vy ; float p 2 = px * px + py * py ; float determinant = pv * pv - v 2 * ( p 2 - m squared diameter ) ; if ( determinant >= 0 ) { float sqrt determinant = math utils . sqrt ( determinant ) ; float t = ( - pv - sqrt determinant ) / v 2 ; if ( t > fraction ) { continue ; } if ( t < 0 ) { t = ( - pv + sqrt determinant ) / v 2 ; if ( t < 0 || t > fraction ) { continue ; } } final vec 2 n = temp vec ; temp vec . x = px + t * vx ; temp vec . y = py + t * vy ; n . normalize ( ) ; final vec 2 point = temp vec 2 ; point . x = point 1 . x + t * vx ; point . y = point 1 . y + t * vy ; float f = callback . report particle ( i , point , n , t ) ; fraction = math utils . min ( fraction , f ) ; if ( fraction <= 0 ) { break ; } } } } <SENTENCE_END/>
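
The heart of the raycast body above is a segment-versus-circle intersection solved as a quadratic in the ray parameter t. A compact, self-contained sketch of just that step; the method name and parameter layout are illustrative, not the library's API.

    // Intersects the segment p1 + t*(v), t in [0, fraction], with a circle of
    // squared radius r2 centred at (cx, cy). Returns the entry parameter t,
    // or -1 if the segment misses within the allowed range.
    static float raySegmentCircle(float p1x, float p1y, float vx, float vy,
                                  float cx, float cy, float r2, float fraction) {
      float v2 = vx * vx + vy * vy;
      if (v2 == 0) {
        v2 = Float.MAX_VALUE;              // degenerate segment: avoid divide by zero
      }
      float px = p1x - cx;
      float py = p1y - cy;
      float pv = px * vx + py * vy;
      float p2 = px * px + py * py;
      float determinant = pv * pv - v2 * (p2 - r2);
      if (determinant < 0) {
        return -1;                         // no real intersection
      }
      float sqrtDeterminant = (float) Math.sqrt(determinant);
      float t = (-pv - sqrtDeterminant) / v2;  // nearer root first
      if (t > fraction) {
        return -1;                         // hit lies beyond the allowed range
      }
      if (t < 0) {
        t = (-pv + sqrtDeterminant) / v2;  // segment starts inside: take the exit root
        if (t < 0 || t > fraction) {
          return -1;
        }
      }
      return t;
    }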


Original Name compute,particle,collision,energy

compute

<SENTENCE_START> { float sum v 2 = 0 ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; vec 2 n = contact . normal ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float vx = vb . x - va . x ; final float vy = vb . y - va . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { sum v 2 += vn * vn ; } } return 0.5f * get particle mass ( ) * sum v 2 ; } <SENTENCE_END/>

(Copy Probability: 3.2%)

<SENTENCE_START> { float sum v 2 = 0 ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; vec 2 n = contact . normal ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float vx = vb . x - va . x ; final float vy = vb . y - va . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { sum v 2 += vn * vn ; } } return 0.5f * get particle mass ( ) * sum v 2 ; } <SENTENCE_END/>

particle

<SENTENCE_START> { float sum v 2 = 0 ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; vec 2 n = contact . normal ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float vx = vb . x - va . x ; final float vy = vb . y - va . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { sum v 2 += vn * vn ; } } return 0.5f * get particle mass ( ) * sum v 2 ; } <SENTENCE_END/>

(Copy Probability: 9.2%)

<SENTENCE_START> { float sum v 2 = 0 ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; vec 2 n = contact . normal ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float vx = vb . x - va . x ; final float vy = vb . y - va . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { sum v 2 += vn * vn ; } } return 0.5f * get particle mass ( ) * sum v 2 ; } <SENTENCE_END/>

collision

<SENTENCE_START> { float sum v 2 = 0 ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; vec 2 n = contact . normal ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float vx = vb . x - va . x ; final float vy = vb . y - va . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { sum v 2 += vn * vn ; } } return 0.5f * get particle mass ( ) * sum v 2 ; } <SENTENCE_END/>

(Copy Probability: 10.2%)

<SENTENCE_START> { float sum v 2 = 0 ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; vec 2 n = contact . normal ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float vx = vb . x - va . x ; final float vy = vb . y - va . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { sum v 2 += vn * vn ; } } return 0.5f * get particle mass ( ) * sum v 2 ; } <SENTENCE_END/>

energy

<SENTENCE_START> { float sum v 2 = 0 ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; vec 2 n = contact . normal ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float vx = vb . x - va . x ; final float vy = vb . y - va . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { sum v 2 += vn * vn ; } } return 0.5f * get particle mass ( ) * sum v 2 ; } <SENTENCE_END/>

(Copy Probability: 5.4%)

<SENTENCE_START> { float sum v 2 = 0 ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; vec 2 n = contact . normal ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float vx = vb . x - va . x ; final float vy = vb . y - va . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { sum v 2 += vn * vn ; } } return 0.5f * get particle mass ( ) * sum v 2 ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { float sum v 2 = 0 ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; vec 2 n = contact . normal ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float vx = vb . x - va . x ; final float vy = vb . y - va . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { sum v 2 += vn * vn ; } } return 0.5f * get particle mass ( ) * sum v 2 ; } <SENTENCE_END/>

(Copy Probability: 4.6%)

<SENTENCE_START> { float sum v 2 = 0 ; for ( int k = 0 ; k < m contact count ; k ++ ) { final particle contact contact = m contact buffer [ k ] ; int a = contact . index a ; int b = contact . index b ; vec 2 n = contact . normal ; final vec 2 va = m velocity buffer . data [ a ] ; final vec 2 vb = m velocity buffer . data [ b ] ; final float vx = vb . x - va . x ; final float vy = vb . y - va . y ; float vn = vx * n . x + vy * n . y ; if ( vn < 0 ) { sum v 2 += vn * vn ; } } return 0.5f * get particle mass ( ) * sum v 2 ; } <SENTENCE_END/>
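
The compute,particle,collision,energy body above sums squared approach speeds over all particle contacts and scales by half the per-particle mass. A self-contained reconstruction, assuming jbox2d-style ParticleContact and Vec2 shapes:

    // Kinetic energy of approaching contact pairs: only contacts whose relative
    // velocity points into the contact normal (vn < 0) contribute.
    static float computeParticleCollisionEnergy(ParticleContact[] contacts,
                                                int contactCount,
                                                Vec2[] velocity,
                                                float particleMass) {
      float sumV2 = 0;
      for (int k = 0; k < contactCount; k++) {
        ParticleContact contact = contacts[k];
        Vec2 n = contact.normal;
        Vec2 va = velocity[contact.indexA];
        Vec2 vb = velocity[contact.indexB];
        float vx = vb.x - va.x;
        float vy = vb.y - va.y;
        float vn = vx * n.x + vy * n.y;    // relative speed along the normal
        if (vn < 0) {
          sumV2 += vn * vn;
        }
      }
      return 0.5f * particleMass * sumV2;
    }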


Original Name reallocate,buffer

reallocate

<SENTENCE_START> { assert ( new capacity > old capacity ) ; return buffer utils . %SELF% ( buffer . data class , buffer . data , buffer . user supplied capacity , old capacity , new capacity , deferred ) ; } <SENTENCE_END/>

(Copy Probability: 2.3%)

<SENTENCE_START> { assert ( new capacity > old capacity ) ; return buffer utils . %SELF% ( buffer . data class , buffer . data , buffer . user supplied capacity , old capacity , new capacity , deferred ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { assert ( new capacity > old capacity ) ; return buffer utils . %SELF% ( buffer . data class , buffer . data , buffer . user supplied capacity , old capacity , new capacity , deferred ) ; } <SENTENCE_END/>

(Copy Probability: 17.8%)

<SENTENCE_START> { assert ( new capacity > old capacity ) ; return buffer utils . %SELF% ( buffer . data class , buffer . data , buffer . user supplied capacity , old capacity , new capacity , deferred ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { assert ( new capacity > old capacity ) ; return buffer utils . %SELF% ( buffer . data class , buffer . data , buffer . user supplied capacity , old capacity , new capacity , deferred ) ; } <SENTENCE_END/>

(Copy Probability: 3.9%)

<SENTENCE_START> { assert ( new capacity > old capacity ) ; return buffer utils . %SELF% ( buffer . data class , buffer . data , buffer . user supplied capacity , old capacity , new capacity , deferred ) ; } <SENTENCE_END/>


Original Name reallocate,buffer

reallocate

<SENTENCE_START> { assert ( new capacity > old capacity ) ; return buffer utils . %SELF% ( buffer . data , buffer . user supplied capacity , old capacity , new capacity , deferred ) ; } <SENTENCE_END/>

(Copy Probability: 2.3%)

<SENTENCE_START> { assert ( new capacity > old capacity ) ; return buffer utils . %SELF% ( buffer . data , buffer . user supplied capacity , old capacity , new capacity , deferred ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { assert ( new capacity > old capacity ) ; return buffer utils . %SELF% ( buffer . data , buffer . user supplied capacity , old capacity , new capacity , deferred ) ; } <SENTENCE_END/>

(Copy Probability: 11.7%)

<SENTENCE_START> { assert ( new capacity > old capacity ) ; return buffer utils . %SELF% ( buffer . data , buffer . user supplied capacity , old capacity , new capacity , deferred ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { assert ( new capacity > old capacity ) ; return buffer utils . %SELF% ( buffer . data , buffer . user supplied capacity , old capacity , new capacity , deferred ) ; } <SENTENCE_END/>

(Copy Probability: 2.1%)

<SENTENCE_START> { assert ( new capacity > old capacity ) ; return buffer utils . %SELF% ( buffer . data , buffer . user supplied capacity , old capacity , new capacity , deferred ) ; } <SENTENCE_END/>
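
Both reallocate,buffer overloads above are thin wrappers: assert that the buffer actually grows, then let BufferUtils perform the copy. A sketch of the typed overload; the generic signature and ParticleBuffer field names are assumptions inferred from the tokenized call.

    // Grow-only reallocation: delegates the copy to BufferUtils, honouring any
    // user-supplied capacity and the deferred-allocation flag.
    <T> T[] reallocateBuffer(ParticleBuffer<T> buffer, int oldCapacity,
                             int newCapacity, boolean deferred) {
      assert (newCapacity > oldCapacity);
      return BufferUtils.reallocateBuffer(buffer.dataClass, buffer.data,
          buffer.userSuppliedCapacity, oldCapacity, newCapacity, deferred);
    }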


Original Name request,particle,buffer

request

<SENTENCE_START> { if ( buffer == null ) { buffer = ( t [ ] ) array reflection . new instance ( klass , m internal allocated capacity ) ; for ( int i = 0 ; i < m internal allocated capacity ; i ++ ) { try { buffer [ i ] = class reflection . new instance ( klass ) ; } catch ( exception e ) { throw new runtime exception ( e ) ; } } } return buffer ; } <SENTENCE_END/>

(Copy Probability: 3.8%)

<SENTENCE_START> { if ( buffer == null ) { buffer = ( t [ ] ) array reflection . new instance ( klass , m internal allocated capacity ) ; for ( int i = 0 ; i < m internal allocated capacity ; i ++ ) { try { buffer [ i ] = class reflection . new instance ( klass ) ; } catch ( exception e ) { throw new runtime exception ( e ) ; } } } return buffer ; } <SENTENCE_END/>

particle

<SENTENCE_START> { if ( buffer == null ) { buffer = ( t [ ] ) array reflection . new instance ( klass , m internal allocated capacity ) ; for ( int i = 0 ; i < m internal allocated capacity ; i ++ ) { try { buffer [ i ] = class reflection . new instance ( klass ) ; } catch ( exception e ) { throw new runtime exception ( e ) ; } } } return buffer ; } <SENTENCE_END/>

(Copy Probability: 25.8%)

<SENTENCE_START> { if ( buffer == null ) { buffer = ( t [ ] ) array reflection . new instance ( klass , m internal allocated capacity ) ; for ( int i = 0 ; i < m internal allocated capacity ; i ++ ) { try { buffer [ i ] = class reflection . new instance ( klass ) ; } catch ( exception e ) { throw new runtime exception ( e ) ; } } } return buffer ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { if ( buffer == null ) { buffer = ( t [ ] ) array reflection . new instance ( klass , m internal allocated capacity ) ; for ( int i = 0 ; i < m internal allocated capacity ; i ++ ) { try { buffer [ i ] = class reflection . new instance ( klass ) ; } catch ( exception e ) { throw new runtime exception ( e ) ; } } } return buffer ; } <SENTENCE_END/>

(Copy Probability: 15.2%)

<SENTENCE_START> { if ( buffer == null ) { buffer = ( t [ ] ) array reflection . new instance ( klass , m internal allocated capacity ) ; for ( int i = 0 ; i < m internal allocated capacity ; i ++ ) { try { buffer [ i ] = class reflection . new instance ( klass ) ; } catch ( exception e ) { throw new runtime exception ( e ) ; } } } return buffer ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( buffer == null ) { buffer = ( t [ ] ) array reflection . new instance ( klass , m internal allocated capacity ) ; for ( int i = 0 ; i < m internal allocated capacity ; i ++ ) { try { buffer [ i ] = class reflection . new instance ( klass ) ; } catch ( exception e ) { throw new runtime exception ( e ) ; } } } return buffer ; } <SENTENCE_END/>

(Copy Probability: 3.5%)

<SENTENCE_START> { if ( buffer == null ) { buffer = ( t [ ] ) array reflection . new instance ( klass , m internal allocated capacity ) ; for ( int i = 0 ; i < m internal allocated capacity ; i ++ ) { try { buffer [ i ] = class reflection . new instance ( klass ) ; } catch ( exception e ) { throw new runtime exception ( e ) ; } } } return buffer ; } <SENTENCE_END/>


Original Name request,particle,buffer

request

<SENTENCE_START> { if ( buffer == null ) { buffer = new float [ m internal allocated capacity ] ; } return buffer ; } <SENTENCE_END/>

(Copy Probability: 1.8%)

<SENTENCE_START> { if ( buffer == null ) { buffer = new float [ m internal allocated capacity ] ; } return buffer ; } <SENTENCE_END/>

particle

<SENTENCE_START> { if ( buffer == null ) { buffer = new float [ m internal allocated capacity ] ; } return buffer ; } <SENTENCE_END/>

(Copy Probability: 77.7%)

<SENTENCE_START> { if ( buffer == null ) { buffer = new float [ m internal allocated capacity ] ; } return buffer ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { if ( buffer == null ) { buffer = new float [ m internal allocated capacity ] ; } return buffer ; } <SENTENCE_END/>

(Copy Probability: 35.2%)

<SENTENCE_START> { if ( buffer == null ) { buffer = new float [ m internal allocated capacity ] ; } return buffer ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( buffer == null ) { buffer = new float [ m internal allocated capacity ] ; } return buffer ; } <SENTENCE_END/>

(Copy Probability: 6.3%)

<SENTENCE_START> { if ( buffer == null ) { buffer = new float [ m internal allocated capacity ] ; } return buffer ; } <SENTENCE_END/>
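
The two request,particle,buffer bodies above share one lazy-allocation pattern: allocate on first use, otherwise return the existing array. The generic variant additionally fills each slot via libgdx reflection (ArrayReflection/ClassReflection, as in the tokenized code); the float variant only needs the array. A hedged sketch, with the capacity field name assumed:

    @SuppressWarnings("unchecked")
    <T> T[] requestParticleBuffer(Class<T> klass, T[] buffer) {
      if (buffer == null) {
        // Allocate and populate with default-constructed elements.
        buffer = (T[]) ArrayReflection.newInstance(klass, m_internalAllocatedCapacity);
        for (int i = 0; i < m_internalAllocatedCapacity; i++) {
          try {
            buffer[i] = ClassReflection.newInstance(klass);
          } catch (Exception e) {
            throw new RuntimeException(e);
          }
        }
      }
      return buffer;
    }

    float[] requestParticleBuffer(float[] buffer) {
      if (buffer == null) {
        buffer = new float[m_internalAllocatedCapacity];
      }
      return buffer;
    }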


Original Name callback

callback

<SENTENCE_START> { final vec 2 pa = system . m position buffer . data [ a ] ; final vec 2 pb = system . m position buffer . data [ b ] ; final vec 2 pc = system . m position buffer . data [ c ] ; final float dabx = pa . x - pb . x ; final float daby = pa . y - pb . y ; final float dbcx = pb . x - pc . x ; final float dbcy = pb . y - pc . y ; final float dcax = pc . x - pa . x ; final float dcay = pc . y - pa . y ; float max distance squared = settings . max triad distance squared * system . m squared diameter ; if ( dabx * dabx + daby * daby < max distance squared && dbcx * dbcx + dbcy * dbcy < max distance squared && dcax * dcax + dcay * dcay < max distance squared ) { if ( system . m triad count >= system . m triad capacity ) { int old capacity = system . m triad capacity ; int new capacity = system . m triad count != 0 ? 2 * system . m triad count : settings . min particle buffer capacity ; system . m triad buffer = buffer utils . reallocate buffer ( triad . class , system . m triad buffer , old capacity , new capacity ) ; system . m triad capacity = new capacity ; } triad triad = system . m triad buffer [ system . m triad count ] ; triad . index a = a ; triad . index b = b ; triad . index c = c ; triad . flags = system . m flags buffer . data [ a ] | system . m flags buffer . data [ b ] | system . m flags buffer . data [ c ] ; triad . strength = def . strength ; final float mid pointx = ( float ) 1 / 3 * ( pa . x + pb . x + pc . x ) ; final float mid pointy = ( float ) 1 / 3 * ( pa . y + pb . y + pc . y ) ; triad . pa . x = pa . x - mid pointx ; triad . pa . y = pa . y - mid pointy ; triad . pb . x = pb . x - mid pointx ; triad . pb . y = pb . y - mid pointy ; triad . pc . x = pc . x - mid pointx ; triad . pc . y = pc . y - mid pointy ; triad . ka = - ( dcax * dabx + dcay * daby ) ; triad . kb = - ( dabx * dbcx + daby * dbcy ) ; triad . kc = - ( dbcx * dcax + dbcy * dcay ) ; triad . s = vec 2 . cross ( pa , pb ) + vec 2 . cross ( pb , pc ) + vec 2 . cross ( pc , pa ) ; system . m triad count ++ ; } } <SENTENCE_END/>

(Copy Probability: 3.4%)

<SENTENCE_START> { final vec 2 pa = system . m position buffer . data [ a ] ; final vec 2 pb = system . m position buffer . data [ b ] ; final vec 2 pc = system . m position buffer . data [ c ] ; final float dabx = pa . x - pb . x ; final float daby = pa . y - pb . y ; final float dbcx = pb . x - pc . x ; final float dbcy = pb . y - pc . y ; final float dcax = pc . x - pa . x ; final float dcay = pc . y - pa . y ; float max distance squared = settings . max triad distance squared * system . m squared diameter ; if ( dabx * dabx + daby * daby < max distance squared && dbcx * dbcx + dbcy * dbcy < max distance squared && dcax * dcax + dcay * dcay < max distance squared ) { if ( system . m triad count >= system . m triad capacity ) { int old capacity = system . m triad capacity ; int new capacity = system . m triad count != 0 ? 2 * system . m triad count : settings . min particle buffer capacity ; system . m triad buffer = buffer utils . reallocate buffer ( triad . class , system . m triad buffer , old capacity , new capacity ) ; system . m triad capacity = new capacity ; } triad triad = system . m triad buffer [ system . m triad count ] ; triad . index a = a ; triad . index b = b ; triad . index c = c ; triad . flags = system . m flags buffer . data [ a ] | system . m flags buffer . data [ b ] | system . m flags buffer . data [ c ] ; triad . strength = def . strength ; final float mid pointx = ( float ) 1 / 3 * ( pa . x + pb . x + pc . x ) ; final float mid pointy = ( float ) 1 / 3 * ( pa . y + pb . y + pc . y ) ; triad . pa . x = pa . x - mid pointx ; triad . pa . y = pa . y - mid pointy ; triad . pb . x = pb . x - mid pointx ; triad . pb . y = pb . y - mid pointy ; triad . pc . x = pc . x - mid pointx ; triad . pc . y = pc . y - mid pointy ; triad . ka = - ( dcax * dabx + dcay * daby ) ; triad . kb = - ( dabx * dbcx + daby * dbcy ) ; triad . kc = - ( dbcx * dcax + dbcy * dcay ) ; triad . s = vec 2 . cross ( pa , pb ) + vec 2 . cross ( pb , pc ) + vec 2 . cross ( pc , pa ) ; system . m triad count ++ ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { final vec 2 pa = system . m position buffer . data [ a ] ; final vec 2 pb = system . m position buffer . data [ b ] ; final vec 2 pc = system . m position buffer . data [ c ] ; final float dabx = pa . x - pb . x ; final float daby = pa . y - pb . y ; final float dbcx = pb . x - pc . x ; final float dbcy = pb . y - pc . y ; final float dcax = pc . x - pa . x ; final float dcay = pc . y - pa . y ; float max distance squared = settings . max triad distance squared * system . m squared diameter ; if ( dabx * dabx + daby * daby < max distance squared && dbcx * dbcx + dbcy * dbcy < max distance squared && dcax * dcax + dcay * dcay < max distance squared ) { if ( system . m triad count >= system . m triad capacity ) { int old capacity = system . m triad capacity ; int new capacity = system . m triad count != 0 ? 2 * system . m triad count : settings . min particle buffer capacity ; system . m triad buffer = buffer utils . reallocate buffer ( triad . class , system . m triad buffer , old capacity , new capacity ) ; system . m triad capacity = new capacity ; } triad triad = system . m triad buffer [ system . m triad count ] ; triad . index a = a ; triad . index b = b ; triad . index c = c ; triad . flags = system . m flags buffer . data [ a ] | system . m flags buffer . data [ b ] | system . m flags buffer . data [ c ] ; triad . strength = def . strength ; final float mid pointx = ( float ) 1 / 3 * ( pa . x + pb . x + pc . x ) ; final float mid pointy = ( float ) 1 / 3 * ( pa . y + pb . y + pc . y ) ; triad . pa . x = pa . x - mid pointx ; triad . pa . y = pa . y - mid pointy ; triad . pb . x = pb . x - mid pointx ; triad . pb . y = pb . y - mid pointy ; triad . pc . x = pc . x - mid pointx ; triad . pc . y = pc . y - mid pointy ; triad . ka = - ( dcax * dabx + dcay * daby ) ; triad . kb = - ( dabx * dbcx + daby * dbcy ) ; triad . kc = - ( dbcx * dcax + dbcy * dcay ) ; triad . s = vec 2 . cross ( pa , pb ) + vec 2 . cross ( pb , pc ) + vec 2 . cross ( pc , pa ) ; system . m triad count ++ ; } } <SENTENCE_END/>

(Copy Probability: 5.9%)

<SENTENCE_START> { final vec 2 pa = system . m position buffer . data [ a ] ; final vec 2 pb = system . m position buffer . data [ b ] ; final vec 2 pc = system . m position buffer . data [ c ] ; final float dabx = pa . x - pb . x ; final float daby = pa . y - pb . y ; final float dbcx = pb . x - pc . x ; final float dbcy = pb . y - pc . y ; final float dcax = pc . x - pa . x ; final float dcay = pc . y - pa . y ; float max distance squared = settings . max triad distance squared * system . m squared diameter ; if ( dabx * dabx + daby * daby < max distance squared && dbcx * dbcx + dbcy * dbcy < max distance squared && dcax * dcax + dcay * dcay < max distance squared ) { if ( system . m triad count >= system . m triad capacity ) { int old capacity = system . m triad capacity ; int new capacity = system . m triad count != 0 ? 2 * system . m triad count : settings . min particle buffer capacity ; system . m triad buffer = buffer utils . reallocate buffer ( triad . class , system . m triad buffer , old capacity , new capacity ) ; system . m triad capacity = new capacity ; } triad triad = system . m triad buffer [ system . m triad count ] ; triad . index a = a ; triad . index b = b ; triad . index c = c ; triad . flags = system . m flags buffer . data [ a ] | system . m flags buffer . data [ b ] | system . m flags buffer . data [ c ] ; triad . strength = def . strength ; final float mid pointx = ( float ) 1 / 3 * ( pa . x + pb . x + pc . x ) ; final float mid pointy = ( float ) 1 / 3 * ( pa . y + pb . y + pc . y ) ; triad . pa . x = pa . x - mid pointx ; triad . pa . y = pa . y - mid pointy ; triad . pb . x = pb . x - mid pointx ; triad . pb . y = pb . y - mid pointy ; triad . pc . x = pc . x - mid pointx ; triad . pc . y = pc . y - mid pointy ; triad . ka = - ( dcax * dabx + dcay * daby ) ; triad . kb = - ( dabx * dbcx + daby * dbcy ) ; triad . kc = - ( dbcx * dcax + dbcy * dcay ) ; triad . s = vec 2 . cross ( pa , pb ) + vec 2 . cross ( pb , pc ) + vec 2 . cross ( pc , pa ) ; system . m triad count ++ ; } } <SENTENCE_END/>


Original Name callback

callback

<SENTENCE_START> { int count a = ( ( a < group b . m first index ) ? 1 : 0 ) + ( ( b < group b . m first index ) ? 1 : 0 ) + ( ( c < group b . m first index ) ? 1 : 0 ) ; if ( count a > 0 && count a < 3 ) { int af = system . m flags buffer . data [ a ] ; int bf = system . m flags buffer . data [ b ] ; int cf = system . m flags buffer . data [ c ] ; if ( ( af & bf & cf & k triad flags ) != 0 ) { final vec 2 pa = system . m position buffer . data [ a ] ; final vec 2 pb = system . m position buffer . data [ b ] ; final vec 2 pc = system . m position buffer . data [ c ] ; final float dabx = pa . x - pb . x ; final float daby = pa . y - pb . y ; final float dbcx = pb . x - pc . x ; final float dbcy = pb . y - pc . y ; final float dcax = pc . x - pa . x ; final float dcay = pc . y - pa . y ; float max distance squared = settings . max triad distance squared * system . m squared diameter ; if ( dabx * dabx + daby * daby < max distance squared && dbcx * dbcx + dbcy * dbcy < max distance squared && dcax * dcax + dcay * dcay < max distance squared ) { if ( system . m triad count >= system . m triad capacity ) { int old capacity = system . m triad capacity ; int new capacity = system . m triad count != 0 ? 2 * system . m triad count : settings . min particle buffer capacity ; system . m triad buffer = buffer utils . reallocate buffer ( triad . class , system . m triad buffer , old capacity , new capacity ) ; system . m triad capacity = new capacity ; } triad triad = system . m triad buffer [ system . m triad count ] ; triad . index a = a ; triad . index b = b ; triad . index c = c ; triad . flags = af | bf | cf ; triad . strength = math utils . min ( group a . m strength , group b . m strength ) ; final float mid pointx = ( float ) 1 / 3 * ( pa . x + pb . x + pc . x ) ; final float mid pointy = ( float ) 1 / 3 * ( pa . y + pb . y + pc . y ) ; triad . pa . x = pa . x - mid pointx ; triad . pa . y = pa . y - mid pointy ; triad . pb . x = pb . x - mid pointx ; triad . pb . y = pb . y - mid pointy ; triad . pc . x = pc . x - mid pointx ; triad . pc . y = pc . y - mid pointy ; triad . ka = - ( dcax * dabx + dcay * daby ) ; triad . kb = - ( dabx * dbcx + daby * dbcy ) ; triad . kc = - ( dbcx * dcax + dbcy * dcay ) ; triad . s = vec 2 . cross ( pa , pb ) + vec 2 . cross ( pb , pc ) + vec 2 . cross ( pc , pa ) ; system . m triad count ++ ; } } } } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { int count a = ( ( a < group b . m first index ) ? 1 : 0 ) + ( ( b < group b . m first index ) ? 1 : 0 ) + ( ( c < group b . m first index ) ? 1 : 0 ) ; if ( count a > 0 && count a < 3 ) { int af = system . m flags buffer . data [ a ] ; int bf = system . m flags buffer . data [ b ] ; int cf = system . m flags buffer . data [ c ] ; if ( ( af & bf & cf & k triad flags ) != 0 ) { final vec 2 pa = system . m position buffer . data [ a ] ; final vec 2 pb = system . m position buffer . data [ b ] ; final vec 2 pc = system . m position buffer . data [ c ] ; final float dabx = pa . x - pb . x ; final float daby = pa . y - pb . y ; final float dbcx = pb . x - pc . x ; final float dbcy = pb . y - pc . y ; final float dcax = pc . x - pa . x ; final float dcay = pc . y - pa . y ; float max distance squared = settings . max triad distance squared * system . m squared diameter ; if ( dabx * dabx + daby * daby < max distance squared && dbcx * dbcx + dbcy * dbcy < max distance squared && dcax * dcax + dcay * dcay < max distance squared ) { if ( system . m triad count >= system . m triad capacity ) { int old capacity = system . m triad capacity ; int new capacity = system . m triad count != 0 ? 2 * system . m triad count : settings . min particle buffer capacity ; system . m triad buffer = buffer utils . reallocate buffer ( triad . class , system . m triad buffer , old capacity , new capacity ) ; system . m triad capacity = new capacity ; } triad triad = system . m triad buffer [ system . m triad count ] ; triad . index a = a ; triad . index b = b ; triad . index c = c ; triad . flags = af | bf | cf ; triad . strength = math utils . min ( group a . m strength , group b . m strength ) ; final float mid pointx = ( float ) 1 / 3 * ( pa . x + pb . x + pc . x ) ; final float mid pointy = ( float ) 1 / 3 * ( pa . y + pb . y + pc . y ) ; triad . pa . x = pa . x - mid pointx ; triad . pa . y = pa . y - mid pointy ; triad . pb . x = pb . x - mid pointx ; triad . pb . y = pb . y - mid pointy ; triad . pc . x = pc . x - mid pointx ; triad . pc . y = pc . y - mid pointy ; triad . ka = - ( dcax * dabx + dcay * daby ) ; triad . kb = - ( dabx * dbcx + daby * dbcy ) ; triad . kc = - ( dbcx * dcax + dbcy * dcay ) ; triad . s = vec 2 . cross ( pa , pb ) + vec 2 . cross ( pb , pc ) + vec 2 . cross ( pc , pa ) ; system . m triad count ++ ; } } } } <SENTENCE_END/>

%END%

<SENTENCE_START> { int count a = ( ( a < group b . m first index ) ? 1 : 0 ) + ( ( b < group b . m first index ) ? 1 : 0 ) + ( ( c < group b . m first index ) ? 1 : 0 ) ; if ( count a > 0 && count a < 3 ) { int af = system . m flags buffer . data [ a ] ; int bf = system . m flags buffer . data [ b ] ; int cf = system . m flags buffer . data [ c ] ; if ( ( af & bf & cf & k triad flags ) != 0 ) { final vec 2 pa = system . m position buffer . data [ a ] ; final vec 2 pb = system . m position buffer . data [ b ] ; final vec 2 pc = system . m position buffer . data [ c ] ; final float dabx = pa . x - pb . x ; final float daby = pa . y - pb . y ; final float dbcx = pb . x - pc . x ; final float dbcy = pb . y - pc . y ; final float dcax = pc . x - pa . x ; final float dcay = pc . y - pa . y ; float max distance squared = settings . max triad distance squared * system . m squared diameter ; if ( dabx * dabx + daby * daby < max distance squared && dbcx * dbcx + dbcy * dbcy < max distance squared && dcax * dcax + dcay * dcay < max distance squared ) { if ( system . m triad count >= system . m triad capacity ) { int old capacity = system . m triad capacity ; int new capacity = system . m triad count != 0 ? 2 * system . m triad count : settings . min particle buffer capacity ; system . m triad buffer = buffer utils . reallocate buffer ( triad . class , system . m triad buffer , old capacity , new capacity ) ; system . m triad capacity = new capacity ; } triad triad = system . m triad buffer [ system . m triad count ] ; triad . index a = a ; triad . index b = b ; triad . index c = c ; triad . flags = af | bf | cf ; triad . strength = math utils . min ( group a . m strength , group b . m strength ) ; final float mid pointx = ( float ) 1 / 3 * ( pa . x + pb . x + pc . x ) ; final float mid pointy = ( float ) 1 / 3 * ( pa . y + pb . y + pc . y ) ; triad . pa . x = pa . x - mid pointx ; triad . pa . y = pa . y - mid pointy ; triad . pb . x = pb . x - mid pointx ; triad . pb . y = pb . y - mid pointy ; triad . pc . x = pc . x - mid pointx ; triad . pc . y = pc . y - mid pointy ; triad . ka = - ( dcax * dabx + dcay * daby ) ; triad . kb = - ( dabx * dbcx + daby * dbcy ) ; triad . kc = - ( dbcx * dcax + dbcy * dcay ) ; triad . s = vec 2 . cross ( pa , pb ) + vec 2 . cross ( pb , pc ) + vec 2 . cross ( pc , pa ) ; system . m triad count ++ ; } } } } <SENTENCE_END/>

(Copy Probability: 5.7%)

<SENTENCE_START> { int count a = ( ( a < group b . m first index ) ? 1 : 0 ) + ( ( b < group b . m first index ) ? 1 : 0 ) + ( ( c < group b . m first index ) ? 1 : 0 ) ; if ( count a > 0 && count a < 3 ) { int af = system . m flags buffer . data [ a ] ; int bf = system . m flags buffer . data [ b ] ; int cf = system . m flags buffer . data [ c ] ; if ( ( af & bf & cf & k triad flags ) != 0 ) { final vec 2 pa = system . m position buffer . data [ a ] ; final vec 2 pb = system . m position buffer . data [ b ] ; final vec 2 pc = system . m position buffer . data [ c ] ; final float dabx = pa . x - pb . x ; final float daby = pa . y - pb . y ; final float dbcx = pb . x - pc . x ; final float dbcy = pb . y - pc . y ; final float dcax = pc . x - pa . x ; final float dcay = pc . y - pa . y ; float max distance squared = settings . max triad distance squared * system . m squared diameter ; if ( dabx * dabx + daby * daby < max distance squared && dbcx * dbcx + dbcy * dbcy < max distance squared && dcax * dcax + dcay * dcay < max distance squared ) { if ( system . m triad count >= system . m triad capacity ) { int old capacity = system . m triad capacity ; int new capacity = system . m triad count != 0 ? 2 * system . m triad count : settings . min particle buffer capacity ; system . m triad buffer = buffer utils . reallocate buffer ( triad . class , system . m triad buffer , old capacity , new capacity ) ; system . m triad capacity = new capacity ; } triad triad = system . m triad buffer [ system . m triad count ] ; triad . index a = a ; triad . index b = b ; triad . index c = c ; triad . flags = af | bf | cf ; triad . strength = math utils . min ( group a . m strength , group b . m strength ) ; final float mid pointx = ( float ) 1 / 3 * ( pa . x + pb . x + pc . x ) ; final float mid pointy = ( float ) 1 / 3 * ( pa . y + pb . y + pc . y ) ; triad . pa . x = pa . x - mid pointx ; triad . pa . y = pa . y - mid pointy ; triad . pb . x = pb . x - mid pointx ; triad . pb . y = pb . y - mid pointy ; triad . pc . x = pc . x - mid pointx ; triad . pc . y = pc . y - mid pointy ; triad . ka = - ( dcax * dabx + dcay * daby ) ; triad . kb = - ( dabx * dbcx + daby * dbcy ) ; triad . kc = - ( dbcx * dcax + dbcy * dcay ) ; triad . s = vec 2 . cross ( pa , pb ) + vec 2 . cross ( pb , pc ) + vec 2 . cross ( pc , pa ) ; system . m triad count ++ ; } } } } <SENTENCE_END/>
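
Both callback bodies above admit a triad only when every pairwise distance between the three particles is below a configured limit (maxTriadDistanceSquared scaled by the squared particle diameter). A self-contained sketch of that admission test; names mirror the tokenized code but are not confirmed API.

    // True if particles a, b, c are mutually close enough to form a triad.
    static boolean withinTriadDistance(Vec2 pa, Vec2 pb, Vec2 pc,
                                       float maxTriadDistanceSquared,
                                       float squaredDiameter) {
      float maxDistanceSquared = maxTriadDistanceSquared * squaredDiameter;
      float dabx = pa.x - pb.x, daby = pa.y - pb.y;
      float dbcx = pb.x - pc.x, dbcy = pb.y - pc.y;
      float dcax = pc.x - pa.x, dcay = pc.y - pa.y;
      return dabx * dabx + daby * daby < maxDistanceSquared
          && dbcx * dbcx + dbcy * dbcy < maxDistanceSquared
          && dcax * dcax + dcay * dcay < maxDistanceSquared;
    }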


Original Name init

init

<SENTENCE_START> { this . system = system ; this . shape = shape ; this . xf = xf ; this . destroyed = 0 ; this . call destruction listener = call destruction listener ; } <SENTENCE_END/>

(Copy Probability: 2.3%)

<SENTENCE_START> { this . system = system ; this . shape = shape ; this . xf = xf ; this . destroyed = 0 ; this . call destruction listener = call destruction listener ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { this . system = system ; this . shape = shape ; this . xf = xf ; this . destroyed = 0 ; this . call destruction listener = call destruction listener ; } <SENTENCE_END/>

(Copy Probability: 51.2%)

<SENTENCE_START> { this . system = system ; this . shape = shape ; this . xf = xf ; this . destroyed = 0 ; this . call destruction listener = call destruction listener ; } <SENTENCE_END/>


Original Name is,proxy,invalid

is

<SENTENCE_START> { return proxy . index < 0 ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { return proxy . index < 0 ; } <SENTENCE_END/>

proxy

<SENTENCE_START> { return proxy . index < 0 ; } <SENTENCE_END/>

(Copy Probability: 0.9%)

<SENTENCE_START> { return proxy . index < 0 ; } <SENTENCE_END/>

invalid

<SENTENCE_START> { return proxy . index < 0 ; } <SENTENCE_END/>

(Copy Probability: 1.4%)

<SENTENCE_START> { return proxy . index < 0 ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return proxy . index < 0 ; } <SENTENCE_END/>

(Copy Probability: 1.4%)

<SENTENCE_START> { return proxy . index < 0 ; } <SENTENCE_END/>


Original Name is,contact,invalid

is

<SENTENCE_START> { return contact . index a < 0 || contact . index b < 0 ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return contact . index a < 0 || contact . index b < 0 ; } <SENTENCE_END/>

contact

<SENTENCE_START> { return contact . index a < 0 || contact . index b < 0 ; } <SENTENCE_END/>

(Copy Probability: 2.0%)

<SENTENCE_START> { return contact . index a < 0 || contact . index b < 0 ; } <SENTENCE_END/>

invalid

<SENTENCE_START> { return contact . index a < 0 || contact . index b < 0 ; } <SENTENCE_END/>

(Copy Probability: 1.5%)

<SENTENCE_START> { return contact . index a < 0 || contact . index b < 0 ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return contact . index a < 0 || contact . index b < 0 ; } <SENTENCE_END/>

(Copy Probability: 2.5%)

<SENTENCE_START> { return contact . index a < 0 || contact . index b < 0 ; } <SENTENCE_END/>


Original Name is,body,contact,invalid

is

<SENTENCE_START> { return contact . index < 0 ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { return contact . index < 0 ; } <SENTENCE_END/>

body

<SENTENCE_START> { return contact . index < 0 ; } <SENTENCE_END/>

(Copy Probability: 1.2%)

<SENTENCE_START> { return contact . index < 0 ; } <SENTENCE_END/>

contact

<SENTENCE_START> { return contact . index < 0 ; } <SENTENCE_END/>

(Copy Probability: 0.7%)

<SENTENCE_START> { return contact . index < 0 ; } <SENTENCE_END/>

invalid

<SENTENCE_START> { return contact . index < 0 ; } <SENTENCE_END/>

(Copy Probability: 1.1%)

<SENTENCE_START> { return contact . index < 0 ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return contact . index < 0 ; } <SENTENCE_END/>

(Copy Probability: 4.0%)

<SENTENCE_START> { return contact . index < 0 ; } <SENTENCE_END/>


Original Name is,pair,invalid

is

<SENTENCE_START> { return pair . index a < 0 || pair . index b < 0 ; } <SENTENCE_END/>

(Copy Probability: 0.9%)

<SENTENCE_START> { return pair . index a < 0 || pair . index b < 0 ; } <SENTENCE_END/>

pair

<SENTENCE_START> { return pair . index a < 0 || pair . index b < 0 ; } <SENTENCE_END/>

(Copy Probability: 1.1%)

<SENTENCE_START> { return pair . index a < 0 || pair . index b < 0 ; } <SENTENCE_END/>

invalid

<SENTENCE_START> { return pair . index a < 0 || pair . index b < 0 ; } <SENTENCE_END/>

(Copy Probability: 1.4%)

<SENTENCE_START> { return pair . index a < 0 || pair . index b < 0 ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return pair . index a < 0 || pair . index b < 0 ; } <SENTENCE_END/>

(Copy Probability: 2.1%)

<SENTENCE_START> { return pair . index a < 0 || pair . index b < 0 ; } <SENTENCE_END/>


Original Name is,triad,invalid

is

<SENTENCE_START> { return triad . index a < 0 || triad . index b < 0 || triad . index c < 0 ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return triad . index a < 0 || triad . index b < 0 || triad . index c < 0 ; } <SENTENCE_END/>

triad

<SENTENCE_START> { return triad . index a < 0 || triad . index b < 0 || triad . index c < 0 ; } <SENTENCE_END/>

(Copy Probability: 1.4%)

<SENTENCE_START> { return triad . index a < 0 || triad . index b < 0 || triad . index c < 0 ; } <SENTENCE_END/>

invalid

<SENTENCE_START> { return triad . index a < 0 || triad . index b < 0 || triad . index c < 0 ; } <SENTENCE_END/>

(Copy Probability: 1.2%)

<SENTENCE_START> { return triad . index a < 0 || triad . index b < 0 || triad . index c < 0 ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return triad . index a < 0 || triad . index b < 0 || triad . index c < 0 ; } <SENTENCE_END/>

(Copy Probability: 1.8%)

<SENTENCE_START> { return triad . index a < 0 || triad . index b < 0 || triad . index c < 0 ; } <SENTENCE_END/>
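
The five is,*,invalid predicates above rely on the same convention: a negative particle index marks a deleted entry. For example, assuming a jbox2d-style Triad record:

    // An entry is invalid once any of its particle indices has been negated.
    static boolean isTriadInvalid(Triad triad) {
      return triad.indexA < 0 || triad.indexB < 0 || triad.indexC < 0;
    }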


Original Name parse

parse

<SENTENCE_START> { array list < c method > methods = new array list < c method > ( ) ; int index = header file . index of ( c method marker ) ; if ( index == - 1 ) return null ; while ( index >= 0 ) { c method method = parse c method ( header file , index ) ; if ( method == null ) throw new runtime exception ( "Couldn't parse method" ) ; methods . add ( method ) ; index = header file . index of ( c method marker , method . end index ) ; } return new c method parser result ( methods ) ; } <SENTENCE_END/>

(Copy Probability: 3.7%)

<SENTENCE_START> { array list < c method > methods = new array list < c method > ( ) ; int index = header file . index of ( c method marker ) ; if ( index == - 1 ) return null ; while ( index >= 0 ) { c method method = parse c method ( header file , index ) ; if ( method == null ) throw new runtime exception ( "Couldn't parse method" ) ; methods . add ( method ) ; index = header file . index of ( c method marker , method . end index ) ; } return new c method parser result ( methods ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { array list < c method > methods = new array list < c method > ( ) ; int index = header file . index of ( c method marker ) ; if ( index == - 1 ) return null ; while ( index >= 0 ) { c method method = parse c method ( header file , index ) ; if ( method == null ) throw new runtime exception ( "Couldn't parse method" ) ; methods . add ( method ) ; index = header file . index of ( c method marker , method . end index ) ; } return new c method parser result ( methods ) ; } <SENTENCE_END/>

(Copy Probability: 33.6%)

<SENTENCE_START> { array list < c method > methods = new array list < c method > ( ) ; int index = header file . index of ( c method marker ) ; if ( index == - 1 ) return null ; while ( index >= 0 ) { c method method = parse c method ( header file , index ) ; if ( method == null ) throw new runtime exception ( "Couldn't parse method" ) ; methods . add ( method ) ; index = header file . index of ( c method marker , method . end index ) ; } return new c method parser result ( methods ) ; } <SENTENCE_END/>
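(Editor's note: a de-tokenized reading of the parse body above, assuming gdx-jnigen-style identifiers. CMethod, CMethodParserResult and C_METHOD_MARKER are reconstructions of the sub-tokens, not verified against the original source.)

    ArrayList<CMethod> methods = new ArrayList<CMethod>();
    int index = headerFile.indexOf(C_METHOD_MARKER);
    if (index == -1) return null;
    while (index >= 0) {
        CMethod method = parseCMethod(headerFile, index);
        if (method == null) throw new RuntimeException("Couldn't parse method");
        methods.add(method);
        // continue scanning for the next marker after the method just parsed
        index = headerFile.indexOf(C_METHOD_MARKER, method.endIndex);
    }
    return new CMethodParserResult(methods);

The loop terminates when indexOf returns -1, i.e. when no further marker occurs after the last parsed method.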


Original Name parse,c,method

parse

<SENTENCE_START> { int head end = header file . index of ( '(' , start ) ; string head = header file . substring ( start , head end ) . trim ( ) ; string return type = head . split ( " " ) [ 1 ] . trim ( ) ; int args start = head end + 1 ; int args end = header file . index of ( ')' , args start ) ; string [ ] args = header file . substring ( args start , args end ) . split ( "," ) ; return new c method ( return type , head , args , start , args end + 1 ) ; } <SENTENCE_END/>

(Copy Probability: 3.3%)

<SENTENCE_START> { int head end = header file . index of ( '(' , start ) ; string head = header file . substring ( start , head end ) . trim ( ) ; string return type = head . split ( " " ) [ 1 ] . trim ( ) ; int args start = head end + 1 ; int args end = header file . index of ( ')' , args start ) ; string [ ] args = header file . substring ( args start , args end ) . split ( "," ) ; return new c method ( return type , head , args , start , args end + 1 ) ; } <SENTENCE_END/>

c

<SENTENCE_START> { int head end = header file . index of ( '(' , start ) ; string head = header file . substring ( start , head end ) . trim ( ) ; string return type = head . split ( " " ) [ 1 ] . trim ( ) ; int args start = head end + 1 ; int args end = header file . index of ( ')' , args start ) ; string [ ] args = header file . substring ( args start , args end ) . split ( "," ) ; return new c method ( return type , head , args , start , args end + 1 ) ; } <SENTENCE_END/>

(Copy Probability: 11.4%)

<SENTENCE_START> { int head end = header file . index of ( '(' , start ) ; string head = header file . substring ( start , head end ) . trim ( ) ; string return type = head . split ( " " ) [ 1 ] . trim ( ) ; int args start = head end + 1 ; int args end = header file . index of ( ')' , args start ) ; string [ ] args = header file . substring ( args start , args end ) . split ( "," ) ; return new c method ( return type , head , args , start , args end + 1 ) ; } <SENTENCE_END/>

method

<SENTENCE_START> { int head end = header file . index of ( '(' , start ) ; string head = header file . substring ( start , head end ) . trim ( ) ; string return type = head . split ( " " ) [ 1 ] . trim ( ) ; int args start = head end + 1 ; int args end = header file . index of ( ')' , args start ) ; string [ ] args = header file . substring ( args start , args end ) . split ( "," ) ; return new c method ( return type , head , args , start , args end + 1 ) ; } <SENTENCE_END/>

(Copy Probability: 2.5%)

<SENTENCE_START> { int head end = header file . index of ( '(' , start ) ; string head = header file . substring ( start , head end ) . trim ( ) ; string return type = head . split ( " " ) [ 1 ] . trim ( ) ; int args start = head end + 1 ; int args end = header file . index of ( ')' , args start ) ; string [ ] args = header file . substring ( args start , args end ) . split ( "," ) ; return new c method ( return type , head , args , start , args end + 1 ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int head end = header file . index of ( '(' , start ) ; string head = header file . substring ( start , head end ) . trim ( ) ; string return type = head . split ( " " ) [ 1 ] . trim ( ) ; int args start = head end + 1 ; int args end = header file . index of ( ')' , args start ) ; string [ ] args = header file . substring ( args start , args end ) . split ( "," ) ; return new c method ( return type , head , args , start , args end + 1 ) ; } <SENTENCE_END/>

(Copy Probability: 4.5%)

<SENTENCE_START> { int head end = header file . index of ( '(' , start ) ; string head = header file . substring ( start , head end ) . trim ( ) ; string return type = head . split ( " " ) [ 1 ] . trim ( ) ; int args start = head end + 1 ; int args end = header file . index of ( ')' , args start ) ; string [ ] args = header file . substring ( args start , args end ) . split ( "," ) ; return new c method ( return type , head , args , start , args end + 1 ) ; } <SENTENCE_END/>
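(Editor's note: the corresponding de-tokenized reading of the parseCMethod body, with the same caveat that identifier casing is reconstructed.)

    int headEnd = headerFile.indexOf('(', start);
    String head = headerFile.substring(start, headEnd).trim();
    // the second whitespace-separated token of the head is taken as the return type
    String returnType = head.split(" ")[1].trim();
    int argsStart = headEnd + 1;
    int argsEnd = headerFile.indexOf(')', argsStart);
    String[] args = headerFile.substring(argsStart, argsEnd).split(",");
    return new CMethod(returnType, head, args, start, argsEnd + 1);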


Original Name get,c,ptr

get

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

(Copy Probability: 1.5%)

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

c

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

(Copy Probability: 99.7%)

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

ptr

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

(Copy Probability: 3.7%)

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>
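(Editor's note: this body is the standard SWIG Java proxy pattern for exposing the wrapped native pointer. A hedged reconstruction, with btTransform used only as an example proxy class, is:)

    public static long getCPtr(btTransform obj) {
        // SWIG convention: a null wrapper maps to the null native pointer (0)
        return (obj == null) ? 0 : obj.swigCPtr;
    }

The near-certain copy probabilities for "c" and "ptr" reflect that both sub-tokens appear literally in the field name swigCPtr.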


Original Name set,graphics,world,trans

set

<SENTENCE_START> { linear math jni . bt default motion state graphics world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 3.5%)

<SENTENCE_START> { linear math jni . bt default motion state graphics world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

graphics

<SENTENCE_START> { linear math jni . bt default motion state graphics world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.6%)

<SENTENCE_START> { linear math jni . bt default motion state graphics world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

world

<SENTENCE_START> { linear math jni . bt default motion state graphics world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { linear math jni . bt default motion state graphics world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

trans

<SENTENCE_START> { linear math jni . bt default motion state graphics world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 96.4%)

<SENTENCE_START> { linear math jni . bt default motion state graphics world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { linear math jni . bt default motion state graphics world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 12.9%)

<SENTENCE_START> { linear math jni . bt default motion state graphics world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>


Original Name get,graphics,world,trans

get

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state graphics world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 3.2%)

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state graphics world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

graphics

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state graphics world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 99.5%)

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state graphics world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

world

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state graphics world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state graphics world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

trans

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state graphics world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 96.0%)

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state graphics world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state graphics world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 13.3%)

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state graphics world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>
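(Editor's note: the set/get pair above follows the usual SWIG property pattern. A hedged reconstruction, assuming the JNI entry-point names are formed from the sub-tokens as shown, which is not verified here:)

    public void setGraphicsWorldTrans(btTransform value) {
        LinearMathJNI.btDefaultMotionState_graphicsWorldTrans_set(swigCPtr, this, btTransform.getCPtr(value), value);
    }

    public btTransform getGraphicsWorldTrans() {
        long cPtr = LinearMathJNI.btDefaultMotionState_graphicsWorldTrans_get(swigCPtr, this);
        // a zero native pointer means the property is unset
        return (cPtr == 0) ? null : new btTransform(cPtr, false);
    }

The entries that follow (centerOfMassOffset, startWorldTrans, userPointer) instantiate the same pattern, differing only in the JNI entry point and the property type, which is why the copied sub-tokens carry such high probabilities there as well.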


Original Name set,center,of,mass,offset

set

<SENTENCE_START> { linear math jni . bt default motion state center of mass offset set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 2.4%)

<SENTENCE_START> { linear math jni . bt default motion state center of mass offset set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

center

<SENTENCE_START> { linear math jni . bt default motion state center of mass offset set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.4%)

<SENTENCE_START> { linear math jni . bt default motion state center of mass offset set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

of

<SENTENCE_START> { linear math jni . bt default motion state center of mass offset set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { linear math jni . bt default motion state center of mass offset set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

mass

<SENTENCE_START> { linear math jni . bt default motion state center of mass offset set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 94.8%)

<SENTENCE_START> { linear math jni . bt default motion state center of mass offset set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

offset

<SENTENCE_START> { linear math jni . bt default motion state center of mass offset set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 19.9%)

<SENTENCE_START> { linear math jni . bt default motion state center of mass offset set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { linear math jni . bt default motion state center of mass offset set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 19.6%)

<SENTENCE_START> { linear math jni . bt default motion state center of mass offset set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>


Original Name get,center,of,mass,offset

get

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state center of mass offset get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 2.6%)

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state center of mass offset get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

center

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state center of mass offset get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 99.3%)

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state center of mass offset get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

of

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state center of mass offset get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state center of mass offset get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

mass

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state center of mass offset get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 95.4%)

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state center of mass offset get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

offset

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state center of mass offset get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 20.6%)

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state center of mass offset get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state center of mass offset get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 20.2%)

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state center of mass offset get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>


Original Name set,start,world,trans

set

<SENTENCE_START> { linear math jni . bt default motion state start world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 3.1%)

<SENTENCE_START> { linear math jni . bt default motion state start world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

start

<SENTENCE_START> { linear math jni . bt default motion state start world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.5%)

<SENTENCE_START> { linear math jni . bt default motion state start world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

world

<SENTENCE_START> { linear math jni . bt default motion state start world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { linear math jni . bt default motion state start world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

trans

<SENTENCE_START> { linear math jni . bt default motion state start world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 95.2%)

<SENTENCE_START> { linear math jni . bt default motion state start world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { linear math jni . bt default motion state start world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 12.6%)

<SENTENCE_START> { linear math jni . bt default motion state start world trans set ( swig c ptr , this , bt transform . get c ptr ( value ) , value ) ; } <SENTENCE_END/>


Original Name get,start,world,trans

get

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state start world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 2.8%)

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state start world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

start

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state start world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 99.5%)

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state start world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

world

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state start world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state start world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

trans

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state start world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 94.7%)

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state start world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state start world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 12.8%)

<SENTENCE_START> { long c ptr = linear math jni . bt default motion state start world trans get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt transform ( c ptr , false ) ; } <SENTENCE_END/>


Original Name set,user,pointer

set

<SENTENCE_START> { linear math jni . bt default motion state user pointer set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 1.7%)

<SENTENCE_START> { linear math jni . bt default motion state user pointer set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

user

<SENTENCE_START> { linear math jni . bt default motion state user pointer set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.4%)

<SENTENCE_START> { linear math jni . bt default motion state user pointer set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

pointer

<SENTENCE_START> { linear math jni . bt default motion state user pointer set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { linear math jni . bt default motion state user pointer set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { linear math jni . bt default motion state user pointer set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 54.9%)

<SENTENCE_START> { linear math jni . bt default motion state user pointer set ( swig c ptr , this , value ) ; } <SENTENCE_END/>


Original Name get,user,pointer

get

<SENTENCE_START> { return linear math jni . bt default motion state user pointer get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 1.7%)

<SENTENCE_START> { return linear math jni . bt default motion state user pointer get ( swig c ptr , this ) ; } <SENTENCE_END/>

user

<SENTENCE_START> { return linear math jni . bt default motion state user pointer get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 99.6%)

<SENTENCE_START> { return linear math jni . bt default motion state user pointer get ( swig c ptr , this ) ; } <SENTENCE_END/>

pointer

<SENTENCE_START> { return linear math jni . bt default motion state user pointer get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { return linear math jni . bt default motion state user pointer get ( swig c ptr , this ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return linear math jni . bt default motion state user pointer get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 63.5%)

<SENTENCE_START> { return linear math jni . bt default motion state user pointer get ( swig c ptr , this ) ; } <SENTENCE_END/>


Original Name get,graphics,world,trans

get

<SENTENCE_START> { linear math jni . bt default motion state get graphics world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

(Copy Probability: 4.3%)

<SENTENCE_START> { linear math jni . bt default motion state get graphics world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

graphics

<SENTENCE_START> { linear math jni . bt default motion state get graphics world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

(Copy Probability: 99.6%)

<SENTENCE_START> { linear math jni . bt default motion state get graphics world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

world

<SENTENCE_START> { linear math jni . bt default motion state get graphics world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

(Copy Probability: 100.0%)

<SENTENCE_START> { linear math jni . bt default motion state get graphics world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

trans

<SENTENCE_START> { linear math jni . bt default motion state get graphics world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

(Copy Probability: 98.2%)

<SENTENCE_START> { linear math jni . bt default motion state get graphics world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { linear math jni . bt default motion state get graphics world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

(Copy Probability: 19.8%)

<SENTENCE_START> { linear math jni . bt default motion state get graphics world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>
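(Editor's note: this second group of getters writes into a caller-supplied out parameter instead of allocating a new proxy object. A hedged reconstruction, with the same caveat about JNI method naming:)

    public void getGraphicsWorldTrans(btTransform out) {
        LinearMathJNI.btDefaultMotionState_getGraphicsWorldTrans(swigCPtr, this, out);
    }

The getCenterOfMassOffset and getStartWorldTrans entries below are the same out-parameter variant for their respective properties.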


Original Name get,center,of,mass,offset

get

<SENTENCE_START> { linear math jni . bt default motion state get center of mass offset ( swig c ptr , this , out ) ; } <SENTENCE_END/>

(Copy Probability: 3.4%)

<SENTENCE_START> { linear math jni . bt default motion state get center of mass offset ( swig c ptr , this , out ) ; } <SENTENCE_END/>

center

<SENTENCE_START> { linear math jni . bt default motion state get center of mass offset ( swig c ptr , this , out ) ; } <SENTENCE_END/>

(Copy Probability: 99.6%)

<SENTENCE_START> { linear math jni . bt default motion state get center of mass offset ( swig c ptr , this , out ) ; } <SENTENCE_END/>

of

<SENTENCE_START> { linear math jni . bt default motion state get center of mass offset ( swig c ptr , this , out ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { linear math jni . bt default motion state get center of mass offset ( swig c ptr , this , out ) ; } <SENTENCE_END/>

mass

<SENTENCE_START> { linear math jni . bt default motion state get center of mass offset ( swig c ptr , this , out ) ; } <SENTENCE_END/>

(Copy Probability: 97.7%)

<SENTENCE_START> { linear math jni . bt default motion state get center of mass offset ( swig c ptr , this , out ) ; } <SENTENCE_END/>

offset

<SENTENCE_START> { linear math jni . bt default motion state get center of mass offset ( swig c ptr , this , out ) ; } <SENTENCE_END/>

(Copy Probability: 28.8%)

<SENTENCE_START> { linear math jni . bt default motion state get center of mass offset ( swig c ptr , this , out ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { linear math jni . bt default motion state get center of mass offset ( swig c ptr , this , out ) ; } <SENTENCE_END/>

(Copy Probability: 28.3%)

<SENTENCE_START> { linear math jni . bt default motion state get center of mass offset ( swig c ptr , this , out ) ; } <SENTENCE_END/>


Original Name get,start,world,trans

get

<SENTENCE_START> { linear math jni . bt default motion state get start world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { linear math jni . bt default motion state get start world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

start

<SENTENCE_START> { linear math jni . bt default motion state get start world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

(Copy Probability: 99.5%)

<SENTENCE_START> { linear math jni . bt default motion state get start world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

world

<SENTENCE_START> { linear math jni . bt default motion state get start world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { linear math jni . bt default motion state get start world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

trans

<SENTENCE_START> { linear math jni . bt default motion state get start world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

(Copy Probability: 97.5%)

<SENTENCE_START> { linear math jni . bt default motion state get start world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { linear math jni . bt default motion state get start world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>

(Copy Probability: 19.2%)

<SENTENCE_START> { linear math jni . bt default motion state get start world trans ( swig c ptr , this , out ) ; } <SENTENCE_END/>


Original Name put

put

<SENTENCE_START> { if ( key == 0 ) { zero value = value ; if ( ! has zero value ) { has zero value = true ; size ++ ; } return ; } int [ ] key table = this . key table ; int index 1 = key & mask ; int key 1 = key table [ index 1 ] ; if ( key == key 1 ) { value table [ index 1 ] = value ; return ; } int index 2 = hash 2 ( key ) ; int key 2 = key table [ index 2 ] ; if ( key == key 2 ) { value table [ index 2 ] = value ; return ; } int index 3 = hash 3 ( key ) ; int key 3 = key table [ index 3 ] ; if ( key == key 3 ) { value table [ index 3 ] = value ; return ; } for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) { if ( key == key table [ i ] ) { value table [ i ] = value ; return ; } } if ( key 1 == empty ) { key table [ index 1 ] = key ; value table [ index 1 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } if ( key 2 == empty ) { key table [ index 2 ] = key ; value table [ index 2 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } if ( key 3 == empty ) { key table [ index 3 ] = key ; value table [ index 3 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } push ( key , value , index 1 , key 1 , index 2 , key 2 , index 3 , key 3 ) ; } <SENTENCE_END/>

(Copy Probability: 4.1%)

<SENTENCE_START> { if ( key == 0 ) { zero value = value ; if ( ! has zero value ) { has zero value = true ; size ++ ; } return ; } int [ ] key table = this . key table ; int index 1 = key & mask ; int key 1 = key table [ index 1 ] ; if ( key == key 1 ) { value table [ index 1 ] = value ; return ; } int index 2 = hash 2 ( key ) ; int key 2 = key table [ index 2 ] ; if ( key == key 2 ) { value table [ index 2 ] = value ; return ; } int index 3 = hash 3 ( key ) ; int key 3 = key table [ index 3 ] ; if ( key == key 3 ) { value table [ index 3 ] = value ; return ; } for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) { if ( key == key table [ i ] ) { value table [ i ] = value ; return ; } } if ( key 1 == empty ) { key table [ index 1 ] = key ; value table [ index 1 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } if ( key 2 == empty ) { key table [ index 2 ] = key ; value table [ index 2 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } if ( key 3 == empty ) { key table [ index 3 ] = key ; value table [ index 3 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } push ( key , value , index 1 , key 1 , index 2 , key 2 , index 3 , key 3 ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( key == 0 ) { zero value = value ; if ( ! has zero value ) { has zero value = true ; size ++ ; } return ; } int [ ] key table = this . key table ; int index 1 = key & mask ; int key 1 = key table [ index 1 ] ; if ( key == key 1 ) { value table [ index 1 ] = value ; return ; } int index 2 = hash 2 ( key ) ; int key 2 = key table [ index 2 ] ; if ( key == key 2 ) { value table [ index 2 ] = value ; return ; } int index 3 = hash 3 ( key ) ; int key 3 = key table [ index 3 ] ; if ( key == key 3 ) { value table [ index 3 ] = value ; return ; } for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) { if ( key == key table [ i ] ) { value table [ i ] = value ; return ; } } if ( key 1 == empty ) { key table [ index 1 ] = key ; value table [ index 1 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } if ( key 2 == empty ) { key table [ index 2 ] = key ; value table [ index 2 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } if ( key 3 == empty ) { key table [ index 3 ] = key ; value table [ index 3 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } push ( key , value , index 1 , key 1 , index 2 , key 2 , index 3 , key 3 ) ; } <SENTENCE_END/>

(Copy Probability: 32.4%)

<SENTENCE_START> { if ( key == 0 ) { zero value = value ; if ( ! has zero value ) { has zero value = true ; size ++ ; } return ; } int [ ] key table = this . key table ; int index 1 = key & mask ; int key 1 = key table [ index 1 ] ; if ( key == key 1 ) { value table [ index 1 ] = value ; return ; } int index 2 = hash 2 ( key ) ; int key 2 = key table [ index 2 ] ; if ( key == key 2 ) { value table [ index 2 ] = value ; return ; } int index 3 = hash 3 ( key ) ; int key 3 = key table [ index 3 ] ; if ( key == key 3 ) { value table [ index 3 ] = value ; return ; } for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) { if ( key == key table [ i ] ) { value table [ i ] = value ; return ; } } if ( key 1 == empty ) { key table [ index 1 ] = key ; value table [ index 1 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } if ( key 2 == empty ) { key table [ index 2 ] = key ; value table [ index 2 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } if ( key 3 == empty ) { key table [ index 3 ] = key ; value table [ index 3 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } push ( key , value , index 1 , key 1 , index 2 , key 2 , index 3 , key 3 ) ; } <SENTENCE_END/>
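(Editor's note: the put body above is a cuckoo-hashing insert in the style of libGDX's IntIntMap: the zero key is stored out of band, every other key has three candidate slots (index1 = key & mask plus two secondary hashes) and a small linear "stash" overflow region. A hedged reconstruction with libGDX-style field names, which are inferred from the sub-tokens:)

    public void put(int key, int value) {
        if (key == 0) {                          // zero key handled out of band
            zeroValue = value;
            if (!hasZeroValue) { hasZeroValue = true; size++; }
            return;
        }
        int[] keyTable = this.keyTable;
        int index1 = key & mask;   int key1 = keyTable[index1];
        if (key == key1) { valueTable[index1] = value; return; }
        int index2 = hash2(key);   int key2 = keyTable[index2];
        if (key == key2) { valueTable[index2] = value; return; }
        int index3 = hash3(key);   int key3 = keyTable[index3];
        if (key == key3) { valueTable[index3] = value; return; }
        // update in place if the key already sits in the stash region
        for (int i = capacity, n = i + stashSize; i < n; i++)
            if (key == keyTable[i]) { valueTable[i] = value; return; }
        // otherwise insert into the first empty candidate slot
        if (key1 == EMPTY) {
            keyTable[index1] = key; valueTable[index1] = value;
            if (size++ >= threshold) resize(capacity << 1);
            return;
        }
        if (key2 == EMPTY) {
            keyTable[index2] = key; valueTable[index2] = value;
            if (size++ >= threshold) resize(capacity << 1);
            return;
        }
        if (key3 == EMPTY) {
            keyTable[index3] = key; valueTable[index3] = value;
            if (size++ >= threshold) resize(capacity << 1);
            return;
        }
        // all three slots taken by other keys: start cuckoo displacement
        push(key, value, index1, key1, index2, key2, index3, key3);
    }

The putResize body further down is the same logic minus the "already present" checks, since it is only called while rehashing keys that are known to be absent.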


Original Name put,all

put

<SENTENCE_START> { for ( entry entry : map . entries ( ) ) put ( entry . key , entry . value ) ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { for ( entry entry : map . entries ( ) ) put ( entry . key , entry . value ) ; } <SENTENCE_END/>

all

<SENTENCE_START> { for ( entry entry : map . entries ( ) ) put ( entry . key , entry . value ) ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { for ( entry entry : map . entries ( ) ) put ( entry . key , entry . value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { for ( entry entry : map . entries ( ) ) put ( entry . key , entry . value ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { for ( entry entry : map . entries ( ) ) put ( entry . key , entry . value ) ; } <SENTENCE_END/>


Original Name put,resize

put

<SENTENCE_START> { if ( key == 0 ) { zero value = value ; has zero value = true ; return ; } int index 1 = key & mask ; int key 1 = key table [ index 1 ] ; if ( key 1 == empty ) { key table [ index 1 ] = key ; value table [ index 1 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } int index 2 = hash 2 ( key ) ; int key 2 = key table [ index 2 ] ; if ( key 2 == empty ) { key table [ index 2 ] = key ; value table [ index 2 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } int index 3 = hash 3 ( key ) ; int key 3 = key table [ index 3 ] ; if ( key 3 == empty ) { key table [ index 3 ] = key ; value table [ index 3 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } push ( key , value , index 1 , key 1 , index 2 , key 2 , index 3 , key 3 ) ; } <SENTENCE_END/>

(Copy Probability: 4.5%)

<SENTENCE_START> { if ( key == 0 ) { zero value = value ; has zero value = true ; return ; } int index 1 = key & mask ; int key 1 = key table [ index 1 ] ; if ( key 1 == empty ) { key table [ index 1 ] = key ; value table [ index 1 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } int index 2 = hash 2 ( key ) ; int key 2 = key table [ index 2 ] ; if ( key 2 == empty ) { key table [ index 2 ] = key ; value table [ index 2 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } int index 3 = hash 3 ( key ) ; int key 3 = key table [ index 3 ] ; if ( key 3 == empty ) { key table [ index 3 ] = key ; value table [ index 3 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } push ( key , value , index 1 , key 1 , index 2 , key 2 , index 3 , key 3 ) ; } <SENTENCE_END/>

resize

<SENTENCE_START> { if ( key == 0 ) { zero value = value ; has zero value = true ; return ; } int index 1 = key & mask ; int key 1 = key table [ index 1 ] ; if ( key 1 == empty ) { key table [ index 1 ] = key ; value table [ index 1 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } int index 2 = hash 2 ( key ) ; int key 2 = key table [ index 2 ] ; if ( key 2 == empty ) { key table [ index 2 ] = key ; value table [ index 2 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } int index 3 = hash 3 ( key ) ; int key 3 = key table [ index 3 ] ; if ( key 3 == empty ) { key table [ index 3 ] = key ; value table [ index 3 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } push ( key , value , index 1 , key 1 , index 2 , key 2 , index 3 , key 3 ) ; } <SENTENCE_END/>

(Copy Probability: 24.4%)

<SENTENCE_START> { if ( key == 0 ) { zero value = value ; has zero value = true ; return ; } int index 1 = key & mask ; int key 1 = key table [ index 1 ] ; if ( key 1 == empty ) { key table [ index 1 ] = key ; value table [ index 1 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } int index 2 = hash 2 ( key ) ; int key 2 = key table [ index 2 ] ; if ( key 2 == empty ) { key table [ index 2 ] = key ; value table [ index 2 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } int index 3 = hash 3 ( key ) ; int key 3 = key table [ index 3 ] ; if ( key 3 == empty ) { key table [ index 3 ] = key ; value table [ index 3 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } push ( key , value , index 1 , key 1 , index 2 , key 2 , index 3 , key 3 ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( key == 0 ) { zero value = value ; has zero value = true ; return ; } int index 1 = key & mask ; int key 1 = key table [ index 1 ] ; if ( key 1 == empty ) { key table [ index 1 ] = key ; value table [ index 1 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } int index 2 = hash 2 ( key ) ; int key 2 = key table [ index 2 ] ; if ( key 2 == empty ) { key table [ index 2 ] = key ; value table [ index 2 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } int index 3 = hash 3 ( key ) ; int key 3 = key table [ index 3 ] ; if ( key 3 == empty ) { key table [ index 3 ] = key ; value table [ index 3 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } push ( key , value , index 1 , key 1 , index 2 , key 2 , index 3 , key 3 ) ; } <SENTENCE_END/>

(Copy Probability: 6.6%)

<SENTENCE_START> { if ( key == 0 ) { zero value = value ; has zero value = true ; return ; } int index 1 = key & mask ; int key 1 = key table [ index 1 ] ; if ( key 1 == empty ) { key table [ index 1 ] = key ; value table [ index 1 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } int index 2 = hash 2 ( key ) ; int key 2 = key table [ index 2 ] ; if ( key 2 == empty ) { key table [ index 2 ] = key ; value table [ index 2 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } int index 3 = hash 3 ( key ) ; int key 3 = key table [ index 3 ] ; if ( key 3 == empty ) { key table [ index 3 ] = key ; value table [ index 3 ] = value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } push ( key , value , index 1 , key 1 , index 2 , key 2 , index 3 , key 3 ) ; } <SENTENCE_END/>


Original Name push

push

<SENTENCE_START> { int [ ] key table = this . key table ; int [ ] value table = this . value table ; int mask = this . mask ; int evicted key ; int evicted value ; int i = 0 , push iterations = this . push iterations ; do { switch ( math utils . random ( 2 ) ) { case 0 : evicted key = key 1 ; evicted value = value table [ index 1 ] ; key table [ index 1 ] = insert key ; value table [ index 1 ] = insert value ; break ; case 1 : evicted key = key 2 ; evicted value = value table [ index 2 ] ; key table [ index 2 ] = insert key ; value table [ index 2 ] = insert value ; break ; default : evicted key = key 3 ; evicted value = value table [ index 3 ] ; key table [ index 3 ] = insert key ; value table [ index 3 ] = insert value ; break ; } index 1 = evicted key & mask ; key 1 = key table [ index 1 ] ; if ( key 1 == empty ) { key table [ index 1 ] = evicted key ; value table [ index 1 ] = evicted value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } index 2 = hash 2 ( evicted key ) ; key 2 = key table [ index 2 ] ; if ( key 2 == empty ) { key table [ index 2 ] = evicted key ; value table [ index 2 ] = evicted value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } index 3 = hash 3 ( evicted key ) ; key 3 = key table [ index 3 ] ; if ( key 3 == empty ) { key table [ index 3 ] = evicted key ; value table [ index 3 ] = evicted value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } if ( ++ i == push iterations ) break ; insert key = evicted key ; insert value = evicted value ; } while ( true ) ; put stash ( evicted key , evicted value ) ; } <SENTENCE_END/>

(Copy Probability: 3.7%)

<SENTENCE_START> { int [ ] key table = this . key table ; int [ ] value table = this . value table ; int mask = this . mask ; int evicted key ; int evicted value ; int i = 0 , push iterations = this . push iterations ; do { switch ( math utils . random ( 2 ) ) { case 0 : evicted key = key 1 ; evicted value = value table [ index 1 ] ; key table [ index 1 ] = insert key ; value table [ index 1 ] = insert value ; break ; case 1 : evicted key = key 2 ; evicted value = value table [ index 2 ] ; key table [ index 2 ] = insert key ; value table [ index 2 ] = insert value ; break ; default : evicted key = key 3 ; evicted value = value table [ index 3 ] ; key table [ index 3 ] = insert key ; value table [ index 3 ] = insert value ; break ; } index 1 = evicted key & mask ; key 1 = key table [ index 1 ] ; if ( key 1 == empty ) { key table [ index 1 ] = evicted key ; value table [ index 1 ] = evicted value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } index 2 = hash 2 ( evicted key ) ; key 2 = key table [ index 2 ] ; if ( key 2 == empty ) { key table [ index 2 ] = evicted key ; value table [ index 2 ] = evicted value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } index 3 = hash 3 ( evicted key ) ; key 3 = key table [ index 3 ] ; if ( key 3 == empty ) { key table [ index 3 ] = evicted key ; value table [ index 3 ] = evicted value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } if ( ++ i == push iterations ) break ; insert key = evicted key ; insert value = evicted value ; } while ( true ) ; put stash ( evicted key , evicted value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int [ ] key table = this . key table ; int [ ] value table = this . value table ; int mask = this . mask ; int evicted key ; int evicted value ; int i = 0 , push iterations = this . push iterations ; do { switch ( math utils . random ( 2 ) ) { case 0 : evicted key = key 1 ; evicted value = value table [ index 1 ] ; key table [ index 1 ] = insert key ; value table [ index 1 ] = insert value ; break ; case 1 : evicted key = key 2 ; evicted value = value table [ index 2 ] ; key table [ index 2 ] = insert key ; value table [ index 2 ] = insert value ; break ; default : evicted key = key 3 ; evicted value = value table [ index 3 ] ; key table [ index 3 ] = insert key ; value table [ index 3 ] = insert value ; break ; } index 1 = evicted key & mask ; key 1 = key table [ index 1 ] ; if ( key 1 == empty ) { key table [ index 1 ] = evicted key ; value table [ index 1 ] = evicted value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } index 2 = hash 2 ( evicted key ) ; key 2 = key table [ index 2 ] ; if ( key 2 == empty ) { key table [ index 2 ] = evicted key ; value table [ index 2 ] = evicted value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } index 3 = hash 3 ( evicted key ) ; key 3 = key table [ index 3 ] ; if ( key 3 == empty ) { key table [ index 3 ] = evicted key ; value table [ index 3 ] = evicted value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } if ( ++ i == push iterations ) break ; insert key = evicted key ; insert value = evicted value ; } while ( true ) ; put stash ( evicted key , evicted value ) ; } <SENTENCE_END/>

(Copy Probability: 5.0%)

<SENTENCE_START> { int [ ] key table = this . key table ; int [ ] value table = this . value table ; int mask = this . mask ; int evicted key ; int evicted value ; int i = 0 , push iterations = this . push iterations ; do { switch ( math utils . random ( 2 ) ) { case 0 : evicted key = key 1 ; evicted value = value table [ index 1 ] ; key table [ index 1 ] = insert key ; value table [ index 1 ] = insert value ; break ; case 1 : evicted key = key 2 ; evicted value = value table [ index 2 ] ; key table [ index 2 ] = insert key ; value table [ index 2 ] = insert value ; break ; default : evicted key = key 3 ; evicted value = value table [ index 3 ] ; key table [ index 3 ] = insert key ; value table [ index 3 ] = insert value ; break ; } index 1 = evicted key & mask ; key 1 = key table [ index 1 ] ; if ( key 1 == empty ) { key table [ index 1 ] = evicted key ; value table [ index 1 ] = evicted value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } index 2 = hash 2 ( evicted key ) ; key 2 = key table [ index 2 ] ; if ( key 2 == empty ) { key table [ index 2 ] = evicted key ; value table [ index 2 ] = evicted value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } index 3 = hash 3 ( evicted key ) ; key 3 = key table [ index 3 ] ; if ( key 3 == empty ) { key table [ index 3 ] = evicted key ; value table [ index 3 ] = evicted value ; if ( size ++ >= threshold ) resize ( capacity << 1 ) ; return ; } if ( ++ i == push iterations ) break ; insert key = evicted key ; insert value = evicted value ; } while ( true ) ; put stash ( evicted key , evicted value ) ; } <SENTENCE_END/>
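(Editor's note: push is the cuckoo eviction loop: it randomly evicts one of the three current occupants, tries to re-place the evicted pair in its own candidate slots, and after pushIterations unsuccessful rounds gives up and appends to the stash. A hedged reconstruction, same naming assumptions as above; MathUtils.random(2) returns 0, 1 or 2:)

    private void push(int insertKey, int insertValue, int index1, int key1,
                      int index2, int key2, int index3, int key3) {
        int[] keyTable = this.keyTable;
        int[] valueTable = this.valueTable;
        int mask = this.mask;
        int evictedKey, evictedValue;
        int i = 0, pushIterations = this.pushIterations;
        do {
            // evict a random one of the three occupants and take its slot
            switch (MathUtils.random(2)) {
            case 0:
                evictedKey = key1; evictedValue = valueTable[index1];
                keyTable[index1] = insertKey; valueTable[index1] = insertValue;
                break;
            case 1:
                evictedKey = key2; evictedValue = valueTable[index2];
                keyTable[index2] = insertKey; valueTable[index2] = insertValue;
                break;
            default:
                evictedKey = key3; evictedValue = valueTable[index3];
                keyTable[index3] = insertKey; valueTable[index3] = insertValue;
                break;
            }
            // try to place the evicted pair in one of its own three slots
            index1 = evictedKey & mask;   key1 = keyTable[index1];
            if (key1 == EMPTY) {
                keyTable[index1] = evictedKey; valueTable[index1] = evictedValue;
                if (size++ >= threshold) resize(capacity << 1);
                return;
            }
            index2 = hash2(evictedKey);   key2 = keyTable[index2];
            if (key2 == EMPTY) {
                keyTable[index2] = evictedKey; valueTable[index2] = evictedValue;
                if (size++ >= threshold) resize(capacity << 1);
                return;
            }
            index3 = hash3(evictedKey);   key3 = keyTable[index3];
            if (key3 == EMPTY) {
                keyTable[index3] = evictedKey; valueTable[index3] = evictedValue;
                if (size++ >= threshold) resize(capacity << 1);
                return;
            }
            if (++i == pushIterations) break;
            insertKey = evictedKey; insertValue = evictedValue;
        } while (true);
        putStash(evictedKey, evictedValue);
    }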


Original Name put,stash

put

<SENTENCE_START> { if ( stash size == stash capacity ) { resize ( capacity << 1 ) ; put ( key , value ) ; return ; } int index = capacity + stash size ; key table [ index ] = key ; value table [ index ] = value ; stash size ++ ; size ++ ; } <SENTENCE_END/>

(Copy Probability: 4.0%)

<SENTENCE_START> { if ( stash size == stash capacity ) { resize ( capacity << 1 ) ; put ( key , value ) ; return ; } int index = capacity + stash size ; key table [ index ] = key ; value table [ index ] = value ; stash size ++ ; size ++ ; } <SENTENCE_END/>

stash

<SENTENCE_START> { if ( stash size == stash capacity ) { resize ( capacity << 1 ) ; put ( key , value ) ; return ; } int index = capacity + stash size ; key table [ index ] = key ; value table [ index ] = value ; stash size ++ ; size ++ ; } <SENTENCE_END/>

(Copy Probability: 97.5%)

<SENTENCE_START> { if ( stash size == stash capacity ) { resize ( capacity << 1 ) ; put ( key , value ) ; return ; } int index = capacity + stash size ; key table [ index ] = key ; value table [ index ] = value ; stash size ++ ; size ++ ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( stash size == stash capacity ) { resize ( capacity << 1 ) ; put ( key , value ) ; return ; } int index = capacity + stash size ; key table [ index ] = key ; value table [ index ] = value ; stash size ++ ; size ++ ; } <SENTENCE_END/>

(Copy Probability: 12.5%)

<SENTENCE_START> { if ( stash size == stash capacity ) { resize ( capacity << 1 ) ; put ( key , value ) ; return ; } int index = capacity + stash size ; key table [ index ] = key ; value table [ index ] = value ; stash size ++ ; size ++ ; } <SENTENCE_END/>
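(Editor's note: de-tokenized reading of putStash; the stash is a small linear region at the tail of the main tables.)

    private void putStash(int key, int value) {
        if (stashSize == stashCapacity) {
            // stash is full: grow the main table and retry a regular put
            resize(capacity << 1);
            put(key, value);
            return;
        }
        int index = capacity + stashSize;   // stash entries live after the main slots
        keyTable[index] = key;
        valueTable[index] = value;
        stashSize++;
        size++;
    }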


Original Name get

get

<SENTENCE_START> { if ( key == 0 ) { if ( ! has zero value ) return default value ; return zero value ; } int index = key & mask ; if ( key table [ index ] != key ) { index = hash 2 ( key ) ; if ( key table [ index ] != key ) { index = hash 3 ( key ) ; if ( key table [ index ] != key ) return get stash ( key , default value ) ; } } return value table [ index ] ; } <SENTENCE_END/>

(Copy Probability: 3.0%)

<SENTENCE_START> { if ( key == 0 ) { if ( ! has zero value ) return default value ; return zero value ; } int index = key & mask ; if ( key table [ index ] != key ) { index = hash 2 ( key ) ; if ( key table [ index ] != key ) { index = hash 3 ( key ) ; if ( key table [ index ] != key ) return get stash ( key , default value ) ; } } return value table [ index ] ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( key == 0 ) { if ( ! has zero value ) return default value ; return zero value ; } int index = key & mask ; if ( key table [ index ] != key ) { index = hash 2 ( key ) ; if ( key table [ index ] != key ) { index = hash 3 ( key ) ; if ( key table [ index ] != key ) return get stash ( key , default value ) ; } } return value table [ index ] ; } <SENTENCE_END/>

(Copy Probability: 17.4%)

<SENTENCE_START> { if ( key == 0 ) { if ( ! has zero value ) return default value ; return zero value ; } int index = key & mask ; if ( key table [ index ] != key ) { index = hash 2 ( key ) ; if ( key table [ index ] != key ) { index = hash 3 ( key ) ; if ( key table [ index ] != key ) return get stash ( key , default value ) ; } } return value table [ index ] ; } <SENTENCE_END/>
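(Editor's note: the lookup mirrors the insert: check the zero key, probe the three candidate slots, then fall back to a linear scan of the stash. A de-tokenized reading of this body together with the getStash body from the next entry:)

    public int get(int key, int defaultValue) {
        if (key == 0) return hasZeroValue ? zeroValue : defaultValue;
        int index = key & mask;
        if (keyTable[index] != key) {
            index = hash2(key);
            if (keyTable[index] != key) {
                index = hash3(key);
                if (keyTable[index] != key) return getStash(key, defaultValue);
            }
        }
        return valueTable[index];
    }

    private int getStash(int key, int defaultValue) {
        int[] keyTable = this.keyTable;
        for (int i = capacity, n = i + stashSize; i < n; i++)
            if (key == keyTable[i]) return valueTable[i];
        return defaultValue;
    }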


Original Name get,stash

get

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) return value table [ i ] ; return default value ; } <SENTENCE_END/>

(Copy Probability: 5.9%)

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) return value table [ i ] ; return default value ; } <SENTENCE_END/>

stash

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) return value table [ i ] ; return default value ; } <SENTENCE_END/>

(Copy Probability: 95.8%)

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) return value table [ i ] ; return default value ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) return value table [ i ] ; return default value ; } <SENTENCE_END/>

(Copy Probability: 79.9%)

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) return value table [ i ] ; return default value ; } <SENTENCE_END/>


Original Name get,and,increment

get

<SENTENCE_START> { if ( key == 0 ) { if ( has zero value ) { int value = zero value ; zero value += increment ; return value ; } else { has zero value = true ; zero value = default value + increment ; ++ size ; return default value ; } } int index = key & mask ; if ( key != key table [ index ] ) { index = hash 2 ( key ) ; if ( key != key table [ index ] ) { index = hash 3 ( key ) ; if ( key != key table [ index ] ) return get and increment stash ( key , default value , increment ) ; } } int value = value table [ index ] ; value table [ index ] = value + increment ; return value ; } <SENTENCE_END/>

(Copy Probability: 7.7%)

<SENTENCE_START> { if ( key == 0 ) { if ( has zero value ) { int value = zero value ; zero value += increment ; return value ; } else { has zero value = true ; zero value = default value + increment ; ++ size ; return default value ; } } int index = key & mask ; if ( key != key table [ index ] ) { index = hash 2 ( key ) ; if ( key != key table [ index ] ) { index = hash 3 ( key ) ; if ( key != key table [ index ] ) return get and increment stash ( key , default value , increment ) ; } } int value = value table [ index ] ; value table [ index ] = value + increment ; return value ; } <SENTENCE_END/>

and

<SENTENCE_START> { if ( key == 0 ) { if ( has zero value ) { int value = zero value ; zero value += increment ; return value ; } else { has zero value = true ; zero value = default value + increment ; ++ size ; return default value ; } } int index = key & mask ; if ( key != key table [ index ] ) { index = hash 2 ( key ) ; if ( key != key table [ index ] ) { index = hash 3 ( key ) ; if ( key != key table [ index ] ) return get and increment stash ( key , default value , increment ) ; } } int value = value table [ index ] ; value table [ index ] = value + increment ; return value ; } <SENTENCE_END/>

(Copy Probability: 97.1%)

<SENTENCE_START> { if ( key == 0 ) { if ( has zero value ) { int value = zero value ; zero value += increment ; return value ; } else { has zero value = true ; zero value = default value + increment ; ++ size ; return default value ; } } int index = key & mask ; if ( key != key table [ index ] ) { index = hash 2 ( key ) ; if ( key != key table [ index ] ) { index = hash 3 ( key ) ; if ( key != key table [ index ] ) return get and increment stash ( key , default value , increment ) ; } } int value = value table [ index ] ; value table [ index ] = value + increment ; return value ; } <SENTENCE_END/>

increment

<SENTENCE_START> { if ( key == 0 ) { if ( has zero value ) { int value = zero value ; zero value += increment ; return value ; } else { has zero value = true ; zero value = default value + increment ; ++ size ; return default value ; } } int index = key & mask ; if ( key != key table [ index ] ) { index = hash 2 ( key ) ; if ( key != key table [ index ] ) { index = hash 3 ( key ) ; if ( key != key table [ index ] ) return get and increment stash ( key , default value , increment ) ; } } int value = value table [ index ] ; value table [ index ] = value + increment ; return value ; } <SENTENCE_END/>

(Copy Probability: 98.9%)

<SENTENCE_START> { if ( key == 0 ) { if ( has zero value ) { int value = zero value ; zero value += increment ; return value ; } else { has zero value = true ; zero value = default value + increment ; ++ size ; return default value ; } } int index = key & mask ; if ( key != key table [ index ] ) { index = hash 2 ( key ) ; if ( key != key table [ index ] ) { index = hash 3 ( key ) ; if ( key != key table [ index ] ) return get and increment stash ( key , default value , increment ) ; } } int value = value table [ index ] ; value table [ index ] = value + increment ; return value ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( key == 0 ) { if ( has zero value ) { int value = zero value ; zero value += increment ; return value ; } else { has zero value = true ; zero value = default value + increment ; ++ size ; return default value ; } } int index = key & mask ; if ( key != key table [ index ] ) { index = hash 2 ( key ) ; if ( key != key table [ index ] ) { index = hash 3 ( key ) ; if ( key != key table [ index ] ) return get and increment stash ( key , default value , increment ) ; } } int value = value table [ index ] ; value table [ index ] = value + increment ; return value ; } <SENTENCE_END/>

(Copy Probability: 69.5%)

<SENTENCE_START> { if ( key == 0 ) { if ( has zero value ) { int value = zero value ; zero value += increment ; return value ; } else { has zero value = true ; zero value = default value + increment ; ++ size ; return default value ; } } int index = key & mask ; if ( key != key table [ index ] ) { index = hash 2 ( key ) ; if ( key != key table [ index ] ) { index = hash 3 ( key ) ; if ( key != key table [ index ] ) return get and increment stash ( key , default value , increment ) ; } } int value = value table [ index ] ; value table [ index ] = value + increment ; return value ; } <SENTENCE_END/>
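(Editor's note: getAndIncrement is a read-modify-write lookup: it returns the current value for the key (or defaultValue if absent) and stores value + increment. A hedged reconstruction:)

    public int getAndIncrement(int key, int defaultValue, int increment) {
        if (key == 0) {
            if (hasZeroValue) {
                int value = zeroValue;
                zeroValue += increment;
                return value;
            }
            hasZeroValue = true;
            zeroValue = defaultValue + increment;
            ++size;
            return defaultValue;       // a missing key reads as defaultValue before the increment
        }
        int index = key & mask;
        if (key != keyTable[index]) {
            index = hash2(key);
            if (key != keyTable[index]) {
                index = hash3(key);
                if (key != keyTable[index]) return getAndIncrementStash(key, defaultValue, increment);
            }
        }
        int value = valueTable[index];
        valueTable[index] = value + increment;
        return value;
    }

The getAndIncrementStash entry below performs the same read-modify-write over the stash region and, if the key is absent there too, inserts it with put(key, defaultValue + increment).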


Original Name get,and,increment,stash

get

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) { int value = value table [ i ] ; value table [ i ] = value + increment ; return value ; } put ( key , default value + increment ) ; return default value ; } <SENTENCE_END/>

(Copy Probability: 5.5%)

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) { int value = value table [ i ] ; value table [ i ] = value + increment ; return value ; } put ( key , default value + increment ) ; return default value ; } <SENTENCE_END/>

and

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) { int value = value table [ i ] ; value table [ i ] = value + increment ; return value ; } put ( key , default value + increment ) ; return default value ; } <SENTENCE_END/>

(Copy Probability: 83.0%)

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) { int value = value table [ i ] ; value table [ i ] = value + increment ; return value ; } put ( key , default value + increment ) ; return default value ; } <SENTENCE_END/>

increment

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) { int value = value table [ i ] ; value table [ i ] = value + increment ; return value ; } put ( key , default value + increment ) ; return default value ; } <SENTENCE_END/>

(Copy Probability: 60.6%)

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) { int value = value table [ i ] ; value table [ i ] = value + increment ; return value ; } put ( key , default value + increment ) ; return default value ; } <SENTENCE_END/>

stash

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) { int value = value table [ i ] ; value table [ i ] = value + increment ; return value ; } put ( key , default value + increment ) ; return default value ; } <SENTENCE_END/>

(Copy Probability: 15.0%)

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) { int value = value table [ i ] ; value table [ i ] = value + increment ; return value ; } put ( key , default value + increment ) ; return default value ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) { int value = value table [ i ] ; value table [ i ] = value + increment ; return value ; } put ( key , default value + increment ) ; return default value ; } <SENTENCE_END/>

(Copy Probability: 7.3%)

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) { int value = value table [ i ] ; value table [ i ] = value + increment ; return value ; } put ( key , default value + increment ) ; return default value ; } <SENTENCE_END/>
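
get and increment stash falls back to a plain linear scan over the stash, a short overflow region stored right after the main table (indices capacity through capacity + stash size - 1); if the key is absent even there, it is inserted via put. A small sketch of that scan, with hypothetical parameter names:

    // Sketch: linear scan of the overflow 'stash' region at the tail of the key table.
    static int stashIndexOf(int[] keyTable, int capacity, int stashSize, int key) {
        for (int i = capacity, n = capacity + stashSize; i < n; i++) {
            if (keyTable[i] == key) return i;   // found in the stash
        }
        return -1;                              // caller then does put(key, defaultValue + increment)
    }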


Original Name remove

remove

<SENTENCE_START> { if ( key == 0 ) { if ( ! has zero value ) return default value ; has zero value = false ; size -- ; return zero value ; } int index = key & mask ; if ( key == key table [ index ] ) { key table [ index ] = empty ; int old value = value table [ index ] ; size -- ; return old value ; } index = hash 2 ( key ) ; if ( key == key table [ index ] ) { key table [ index ] = empty ; int old value = value table [ index ] ; size -- ; return old value ; } index = hash 3 ( key ) ; if ( key == key table [ index ] ) { key table [ index ] = empty ; int old value = value table [ index ] ; size -- ; return old value ; } return remove stash ( key , default value ) ; } <SENTENCE_END/>

(Copy Probability: 2.7%)

<SENTENCE_START> { if ( key == 0 ) { if ( ! has zero value ) return default value ; has zero value = false ; size -- ; return zero value ; } int index = key & mask ; if ( key == key table [ index ] ) { key table [ index ] = empty ; int old value = value table [ index ] ; size -- ; return old value ; } index = hash 2 ( key ) ; if ( key == key table [ index ] ) { key table [ index ] = empty ; int old value = value table [ index ] ; size -- ; return old value ; } index = hash 3 ( key ) ; if ( key == key table [ index ] ) { key table [ index ] = empty ; int old value = value table [ index ] ; size -- ; return old value ; } return remove stash ( key , default value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( key == 0 ) { if ( ! has zero value ) return default value ; has zero value = false ; size -- ; return zero value ; } int index = key & mask ; if ( key == key table [ index ] ) { key table [ index ] = empty ; int old value = value table [ index ] ; size -- ; return old value ; } index = hash 2 ( key ) ; if ( key == key table [ index ] ) { key table [ index ] = empty ; int old value = value table [ index ] ; size -- ; return old value ; } index = hash 3 ( key ) ; if ( key == key table [ index ] ) { key table [ index ] = empty ; int old value = value table [ index ] ; size -- ; return old value ; } return remove stash ( key , default value ) ; } <SENTENCE_END/>

(Copy Probability: 10.6%)

<SENTENCE_START> { if ( key == 0 ) { if ( ! has zero value ) return default value ; has zero value = false ; size -- ; return zero value ; } int index = key & mask ; if ( key == key table [ index ] ) { key table [ index ] = empty ; int old value = value table [ index ] ; size -- ; return old value ; } index = hash 2 ( key ) ; if ( key == key table [ index ] ) { key table [ index ] = empty ; int old value = value table [ index ] ; size -- ; return old value ; } index = hash 3 ( key ) ; if ( key == key table [ index ] ) { key table [ index ] = empty ; int old value = value table [ index ] ; size -- ; return old value ; } return remove stash ( key , default value ) ; } <SENTENCE_END/>


Original Name remove,stash

remove

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) { if ( key == key table [ i ] ) { int old value = value table [ i ] ; remove stash index ( i ) ; size -- ; return old value ; } } return default value ; } <SENTENCE_END/>

(Copy Probability: 5.2%)

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) { if ( key == key table [ i ] ) { int old value = value table [ i ] ; remove stash index ( i ) ; size -- ; return old value ; } } return default value ; } <SENTENCE_END/>

stash

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) { if ( key == key table [ i ] ) { int old value = value table [ i ] ; remove stash index ( i ) ; size -- ; return old value ; } } return default value ; } <SENTENCE_END/>

(Copy Probability: 86.0%)

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) { if ( key == key table [ i ] ) { int old value = value table [ i ] ; remove stash index ( i ) ; size -- ; return old value ; } } return default value ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) { if ( key == key table [ i ] ) { int old value = value table [ i ] ; remove stash index ( i ) ; size -- ; return old value ; } } return default value ; } <SENTENCE_END/>

(Copy Probability: 25.7%)

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) { if ( key == key table [ i ] ) { int old value = value table [ i ] ; remove stash index ( i ) ; size -- ; return old value ; } } return default value ; } <SENTENCE_END/>


Original Name remove,stash,index

remove

<SENTENCE_START> { stash size -- ; int last index = capacity + stash size ; if ( index < last index ) { key table [ index ] = key table [ last index ] ; value table [ index ] = value table [ last index ] ; } } <SENTENCE_END/>

(Copy Probability: 3.4%)

<SENTENCE_START> { stash size -- ; int last index = capacity + stash size ; if ( index < last index ) { key table [ index ] = key table [ last index ] ; value table [ index ] = value table [ last index ] ; } } <SENTENCE_END/>

stash

<SENTENCE_START> { stash size -- ; int last index = capacity + stash size ; if ( index < last index ) { key table [ index ] = key table [ last index ] ; value table [ index ] = value table [ last index ] ; } } <SENTENCE_END/>

(Copy Probability: 94.6%)

<SENTENCE_START> { stash size -- ; int last index = capacity + stash size ; if ( index < last index ) { key table [ index ] = key table [ last index ] ; value table [ index ] = value table [ last index ] ; } } <SENTENCE_END/>

index

<SENTENCE_START> { stash size -- ; int last index = capacity + stash size ; if ( index < last index ) { key table [ index ] = key table [ last index ] ; value table [ index ] = value table [ last index ] ; } } <SENTENCE_END/>

(Copy Probability: 5.8%)

<SENTENCE_START> { stash size -- ; int last index = capacity + stash size ; if ( index < last index ) { key table [ index ] = key table [ last index ] ; value table [ index ] = value table [ last index ] ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { stash size -- ; int last index = capacity + stash size ; if ( index < last index ) { key table [ index ] = key table [ last index ] ; value table [ index ] = value table [ last index ] ; } } <SENTENCE_END/>

(Copy Probability: 2.6%)

<SENTENCE_START> { stash size -- ; int last index = capacity + stash size ; if ( index < last index ) { key table [ index ] = key table [ last index ] ; value table [ index ] = value table [ last index ] ; } } <SENTENCE_END/>
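
remove stash index deletes from the stash in constant time: because stash order is irrelevant, it simply moves the last stash entry into the freed slot. A self-contained sketch over parallel key/value arrays (here stashSize is the size before shrinking, so the last entry sits at capacity + stashSize - 1):

    // Sketch: unordered stash removal by swapping the last entry into the hole.
    static void removeFromStash(int[] keyTable, int[] valueTable,
                                int capacity, int stashSize, int index) {
        int lastIndex = capacity + stashSize - 1;
        if (index < lastIndex) {
            keyTable[index] = keyTable[lastIndex];
            valueTable[index] = valueTable[lastIndex];
        }
    }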


Original Name shrink

shrink

<SENTENCE_START> { if ( maximum capacity < 0 ) throw new illegal argument exception ( "maximumCapacity must be >= 0: " + maximum capacity ) ; if ( size > maximum capacity ) maximum capacity = size ; if ( capacity <= maximum capacity ) return ; maximum capacity = math utils . next power of two ( maximum capacity ) ; resize ( maximum capacity ) ; } <SENTENCE_END/>

(Copy Probability: 2.2%)

<SENTENCE_START> { if ( maximum capacity < 0 ) throw new illegal argument exception ( "maximumCapacity must be >= 0: " + maximum capacity ) ; if ( size > maximum capacity ) maximum capacity = size ; if ( capacity <= maximum capacity ) return ; maximum capacity = math utils . next power of two ( maximum capacity ) ; resize ( maximum capacity ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( maximum capacity < 0 ) throw new illegal argument exception ( "maximumCapacity must be >= 0: " + maximum capacity ) ; if ( size > maximum capacity ) maximum capacity = size ; if ( capacity <= maximum capacity ) return ; maximum capacity = math utils . next power of two ( maximum capacity ) ; resize ( maximum capacity ) ; } <SENTENCE_END/>

(Copy Probability: 1.6%)

<SENTENCE_START> { if ( maximum capacity < 0 ) throw new illegal argument exception ( "maximumCapacity must be >= 0: " + maximum capacity ) ; if ( size > maximum capacity ) maximum capacity = size ; if ( capacity <= maximum capacity ) return ; maximum capacity = math utils . next power of two ( maximum capacity ) ; resize ( maximum capacity ) ; } <SENTENCE_END/>
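
shrink first clamps the requested maximum to at least the current size, then rounds it up to a power of two before resizing, because the probe arithmetic (key & mask) only works with power-of-two capacities. The call to math utils . next power of two is assumed to behave like this small sketch:

    // Sketch: round a positive int up to the next power of two.
    static int nextPowerOfTwo(int value) {
        if (value <= 1) return 1;
        return Integer.highestOneBit(value - 1) << 1;   // e.g. 100 -> 128, 64 -> 64
    }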


Original Name clear

clear

<SENTENCE_START> { if ( capacity <= maximum capacity ) { %SELF% ( ) ; return ; } has zero value = false ; size = 0 ; resize ( maximum capacity ) ; } <SENTENCE_END/>

(Copy Probability: 1.3%)

<SENTENCE_START> { if ( capacity <= maximum capacity ) { %SELF% ( ) ; return ; } has zero value = false ; size = 0 ; resize ( maximum capacity ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( capacity <= maximum capacity ) { %SELF% ( ) ; return ; } has zero value = false ; size = 0 ; resize ( maximum capacity ) ; } <SENTENCE_END/>

(Copy Probability: 0.9%)

<SENTENCE_START> { if ( capacity <= maximum capacity ) { %SELF% ( ) ; return ; } has zero value = false ; size = 0 ; resize ( maximum capacity ) ; } <SENTENCE_END/>


Original Name clear

clear

<SENTENCE_START> { if ( size == 0 ) return ; int [ ] key table = this . key table ; for ( int i = capacity + stash size ; i -- > 0 ; ) key table [ i ] = empty ; size = 0 ; stash size = 0 ; has zero value = false ; } <SENTENCE_END/>

(Copy Probability: 2.4%)

<SENTENCE_START> { if ( size == 0 ) return ; int [ ] key table = this . key table ; for ( int i = capacity + stash size ; i -- > 0 ; ) key table [ i ] = empty ; size = 0 ; stash size = 0 ; has zero value = false ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( size == 0 ) return ; int [ ] key table = this . key table ; for ( int i = capacity + stash size ; i -- > 0 ; ) key table [ i ] = empty ; size = 0 ; stash size = 0 ; has zero value = false ; } <SENTENCE_END/>

(Copy Probability: 3.1%)

<SENTENCE_START> { if ( size == 0 ) return ; int [ ] key table = this . key table ; for ( int i = capacity + stash size ; i -- > 0 ; ) key table [ i ] = empty ; size = 0 ; stash size = 0 ; has zero value = false ; } <SENTENCE_END/>


Original Name contains,value

contains

<SENTENCE_START> { if ( has zero value && zero value == value ) return true ; int [ ] key table = this . key table , value table = this . value table ; for ( int i = capacity + stash size ; i -- > 0 ; ) if ( key table [ i ] != 0 && value table [ i ] == value ) return true ; return false ; } <SENTENCE_END/>

(Copy Probability: 2.4%)

<SENTENCE_START> { if ( has zero value && zero value == value ) return true ; int [ ] key table = this . key table , value table = this . value table ; for ( int i = capacity + stash size ; i -- > 0 ; ) if ( key table [ i ] != 0 && value table [ i ] == value ) return true ; return false ; } <SENTENCE_END/>

value

<SENTENCE_START> { if ( has zero value && zero value == value ) return true ; int [ ] key table = this . key table , value table = this . value table ; for ( int i = capacity + stash size ; i -- > 0 ; ) if ( key table [ i ] != 0 && value table [ i ] == value ) return true ; return false ; } <SENTENCE_END/>

(Copy Probability: 3.2%)

<SENTENCE_START> { if ( has zero value && zero value == value ) return true ; int [ ] key table = this . key table , value table = this . value table ; for ( int i = capacity + stash size ; i -- > 0 ; ) if ( key table [ i ] != 0 && value table [ i ] == value ) return true ; return false ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( has zero value && zero value == value ) return true ; int [ ] key table = this . key table , value table = this . value table ; for ( int i = capacity + stash size ; i -- > 0 ; ) if ( key table [ i ] != 0 && value table [ i ] == value ) return true ; return false ; } <SENTENCE_END/>

(Copy Probability: 2.3%)

<SENTENCE_START> { if ( has zero value && zero value == value ) return true ; int [ ] key table = this . key table , value table = this . value table ; for ( int i = capacity + stash size ; i -- > 0 ; ) if ( key table [ i ] != 0 && value table [ i ] == value ) return true ; return false ; } <SENTENCE_END/>


Original Name contains,key

contains

<SENTENCE_START> { if ( key == 0 ) return has zero value ; int index = key & mask ; if ( key table [ index ] != key ) { index = hash 2 ( key ) ; if ( key table [ index ] != key ) { index = hash 3 ( key ) ; if ( key table [ index ] != key ) return contains key stash ( key ) ; } } return true ; } <SENTENCE_END/>

(Copy Probability: 2.9%)

<SENTENCE_START> { if ( key == 0 ) return has zero value ; int index = key & mask ; if ( key table [ index ] != key ) { index = hash 2 ( key ) ; if ( key table [ index ] != key ) { index = hash 3 ( key ) ; if ( key table [ index ] != key ) return contains key stash ( key ) ; } } return true ; } <SENTENCE_END/>

key

<SENTENCE_START> { if ( key == 0 ) return has zero value ; int index = key & mask ; if ( key table [ index ] != key ) { index = hash 2 ( key ) ; if ( key table [ index ] != key ) { index = hash 3 ( key ) ; if ( key table [ index ] != key ) return contains key stash ( key ) ; } } return true ; } <SENTENCE_END/>

(Copy Probability: 5.8%)

<SENTENCE_START> { if ( key == 0 ) return has zero value ; int index = key & mask ; if ( key table [ index ] != key ) { index = hash 2 ( key ) ; if ( key table [ index ] != key ) { index = hash 3 ( key ) ; if ( key table [ index ] != key ) return contains key stash ( key ) ; } } return true ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( key == 0 ) return has zero value ; int index = key & mask ; if ( key table [ index ] != key ) { index = hash 2 ( key ) ; if ( key table [ index ] != key ) { index = hash 3 ( key ) ; if ( key table [ index ] != key ) return contains key stash ( key ) ; } } return true ; } <SENTENCE_END/>

(Copy Probability: 3.0%)

<SENTENCE_START> { if ( key == 0 ) return has zero value ; int index = key & mask ; if ( key table [ index ] != key ) { index = hash 2 ( key ) ; if ( key table [ index ] != key ) { index = hash 3 ( key ) ; if ( key table [ index ] != key ) return contains key stash ( key ) ; } } return true ; } <SENTENCE_END/>


Original Name contains,key,stash

contains

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) return true ; return false ; } <SENTENCE_END/>

(Copy Probability: 6.3%)

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) return true ; return false ; } <SENTENCE_END/>

key

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) return true ; return false ; } <SENTENCE_END/>

(Copy Probability: 95.2%)

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) return true ; return false ; } <SENTENCE_END/>

stash

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) return true ; return false ; } <SENTENCE_END/>

(Copy Probability: 65.1%)

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) return true ; return false ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) return true ; return false ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { int [ ] key table = this . key table ; for ( int i = capacity , n = i + stash size ; i < n ; i ++ ) if ( key == key table [ i ] ) return true ; return false ; } <SENTENCE_END/>
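
contains key and contains key stash together show the map's full lookup order: the home bucket (key & mask), the two alternative buckets from hash 2 and hash 3, and finally a linear scan of the stash. A hedged, self-contained sketch of that probe sequence; the hash functions here are placeholders standing in for the hash 2 / hash 3 bodies further below:

    // Sketch: three-bucket probe followed by a stash scan, mirroring the tokenized lookup.
    class ProbeSketch {
        int[] keyTable;            // main buckets [0, capacity) plus stash [capacity, capacity + stashSize)
        int mask, capacity, stashSize;
        boolean hasZeroValue;

        boolean containsKey(int key) {
            if (key == 0) return hasZeroValue;        // zero key lives outside the table
            int index = key & mask;                   // home bucket
            if (keyTable[index] != key) {
                index = hash2(key);                   // first alternative bucket
                if (keyTable[index] != key) {
                    index = hash3(key);               // second alternative bucket
                    if (keyTable[index] != key) return containsKeyStash(key);
                }
            }
            return true;
        }

        boolean containsKeyStash(int key) {
            for (int i = capacity, n = capacity + stashSize; i < n; i++)
                if (keyTable[i] == key) return true;
            return false;
        }

        // Placeholder hashes; the real hash2/hash3 use prime multipliers and hashShift (see below).
        int hash2(int h) { return (h * 0x9E3779B1 >>> 16) & mask; }
        int hash3(int h) { return (h * 0x85EBCA6B >>> 13) & mask; }
    }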


Original Name find,key

find

<SENTENCE_START> { if ( has zero value && zero value == value ) return 0 ; int [ ] key table = this . key table , value table = this . value table ; for ( int i = capacity + stash size ; i -- > 0 ; ) if ( key table [ i ] != 0 && value table [ i ] == value ) return key table [ i ] ; return not found ; } <SENTENCE_END/>

(Copy Probability: 2.5%)

<SENTENCE_START> { if ( has zero value && zero value == value ) return 0 ; int [ ] key table = this . key table , value table = this . value table ; for ( int i = capacity + stash size ; i -- > 0 ; ) if ( key table [ i ] != 0 && value table [ i ] == value ) return key table [ i ] ; return not found ; } <SENTENCE_END/>

key

<SENTENCE_START> { if ( has zero value && zero value == value ) return 0 ; int [ ] key table = this . key table , value table = this . value table ; for ( int i = capacity + stash size ; i -- > 0 ; ) if ( key table [ i ] != 0 && value table [ i ] == value ) return key table [ i ] ; return not found ; } <SENTENCE_END/>

(Copy Probability: 14.5%)

<SENTENCE_START> { if ( has zero value && zero value == value ) return 0 ; int [ ] key table = this . key table , value table = this . value table ; for ( int i = capacity + stash size ; i -- > 0 ; ) if ( key table [ i ] != 0 && value table [ i ] == value ) return key table [ i ] ; return not found ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( has zero value && zero value == value ) return 0 ; int [ ] key table = this . key table , value table = this . value table ; for ( int i = capacity + stash size ; i -- > 0 ; ) if ( key table [ i ] != 0 && value table [ i ] == value ) return key table [ i ] ; return not found ; } <SENTENCE_END/>

(Copy Probability: 2.4%)

<SENTENCE_START> { if ( has zero value && zero value == value ) return 0 ; int [ ] key table = this . key table , value table = this . value table ; for ( int i = capacity + stash size ; i -- > 0 ; ) if ( key table [ i ] != 0 && value table [ i ] == value ) return key table [ i ] ; return not found ; } <SENTENCE_END/>
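
find key (like contains value above it) cannot use hashing, since values are not indexed; both walk the entire table plus stash backwards, skipping EMPTY slots (key 0). A minimal sketch of that O(capacity + stashSize) scan:

    // Sketch: reverse scan over parallel arrays for the first key mapped to 'value'.
    static int findKey(int[] keyTable, int[] valueTable,
                       int capacity, int stashSize, int value, int notFound) {
        for (int i = capacity + stashSize; i-- > 0; )
            if (keyTable[i] != 0 && valueTable[i] == value) return keyTable[i];
        return notFound;
    }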


Original Name ensure,capacity

ensure

<SENTENCE_START> { int size needed = size + additional capacity ; if ( size needed >= threshold ) resize ( math utils . next power of two ( ( int ) math . ceil ( size needed / load factor ) ) ) ; } <SENTENCE_END/>

(Copy Probability: 2.9%)

<SENTENCE_START> { int size needed = size + additional capacity ; if ( size needed >= threshold ) resize ( math utils . next power of two ( ( int ) math . ceil ( size needed / load factor ) ) ) ; } <SENTENCE_END/>

capacity

<SENTENCE_START> { int size needed = size + additional capacity ; if ( size needed >= threshold ) resize ( math utils . next power of two ( ( int ) math . ceil ( size needed / load factor ) ) ) ; } <SENTENCE_END/>

(Copy Probability: 6.3%)

<SENTENCE_START> { int size needed = size + additional capacity ; if ( size needed >= threshold ) resize ( math utils . next power of two ( ( int ) math . ceil ( size needed / load factor ) ) ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int size needed = size + additional capacity ; if ( size needed >= threshold ) resize ( math utils . next power of two ( ( int ) math . ceil ( size needed / load factor ) ) ) ; } <SENTENCE_END/>

(Copy Probability: 2.4%)

<SENTENCE_START> { int size needed = size + additional capacity ; if ( size needed >= threshold ) resize ( math utils . next power of two ( ( int ) math . ceil ( size needed / load factor ) ) ) ; } <SENTENCE_END/>
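
ensure capacity resizes eagerly when size + additional capacity would reach the load-factor threshold, growing to the next power of two that keeps the projected element count below the load factor. A small worked sketch of that arithmetic (the 0.8 load factor is illustrative, not taken from this document):

    // Sketch: the capacity ensureCapacity would resize to.
    static int requiredCapacity(int size, int additionalCapacity, float loadFactor) {
        int sizeNeeded = size + additionalCapacity;
        int raw = (int) Math.ceil(sizeNeeded / loadFactor);
        int pow = 1;
        while (pow < raw) pow <<= 1;   // round up to a power of two
        return pow;                    // e.g. size 50 + 30 more at 0.8 -> ceil(80/0.8) = 100 -> 128
    }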


Original Name resize

resize

<SENTENCE_START> { int old end index = capacity + stash size ; capacity = new size ; threshold = ( int ) ( new size * load factor ) ; mask = new size - 1 ; hash shift = 31 - integer . number of trailing zeros ( new size ) ; stash capacity = math . max ( 3 , ( int ) math . ceil ( math . log ( new size ) ) * 2 ) ; push iterations = math . max ( math . min ( new size , 8 ) , ( int ) math . sqrt ( new size ) / 8 ) ; int [ ] old key table = key table ; int [ ] old value table = value table ; key table = new int [ new size + stash capacity ] ; value table = new int [ new size + stash capacity ] ; int old size = size ; size = has zero value ? 1 : 0 ; stash size = 0 ; if ( old size > 0 ) { for ( int i = 0 ; i < old end index ; i ++ ) { int key = old key table [ i ] ; if ( key != empty ) put resize ( key , old value table [ i ] ) ; } } } <SENTENCE_END/>

(Copy Probability: 3.9%)

<SENTENCE_START> { int old end index = capacity + stash size ; capacity = new size ; threshold = ( int ) ( new size * load factor ) ; mask = new size - 1 ; hash shift = 31 - integer . number of trailing zeros ( new size ) ; stash capacity = math . max ( 3 , ( int ) math . ceil ( math . log ( new size ) ) * 2 ) ; push iterations = math . max ( math . min ( new size , 8 ) , ( int ) math . sqrt ( new size ) / 8 ) ; int [ ] old key table = key table ; int [ ] old value table = value table ; key table = new int [ new size + stash capacity ] ; value table = new int [ new size + stash capacity ] ; int old size = size ; size = has zero value ? 1 : 0 ; stash size = 0 ; if ( old size > 0 ) { for ( int i = 0 ; i < old end index ; i ++ ) { int key = old key table [ i ] ; if ( key != empty ) put resize ( key , old value table [ i ] ) ; } } } <SENTENCE_END/>

%END%

<SENTENCE_START> { int old end index = capacity + stash size ; capacity = new size ; threshold = ( int ) ( new size * load factor ) ; mask = new size - 1 ; hash shift = 31 - integer . number of trailing zeros ( new size ) ; stash capacity = math . max ( 3 , ( int ) math . ceil ( math . log ( new size ) ) * 2 ) ; push iterations = math . max ( math . min ( new size , 8 ) , ( int ) math . sqrt ( new size ) / 8 ) ; int [ ] old key table = key table ; int [ ] old value table = value table ; key table = new int [ new size + stash capacity ] ; value table = new int [ new size + stash capacity ] ; int old size = size ; size = has zero value ? 1 : 0 ; stash size = 0 ; if ( old size > 0 ) { for ( int i = 0 ; i < old end index ; i ++ ) { int key = old key table [ i ] ; if ( key != empty ) put resize ( key , old value table [ i ] ) ; } } } <SENTENCE_END/>

(Copy Probability: 12.2%)

<SENTENCE_START> { int old end index = capacity + stash size ; capacity = new size ; threshold = ( int ) ( new size * load factor ) ; mask = new size - 1 ; hash shift = 31 - integer . number of trailing zeros ( new size ) ; stash capacity = math . max ( 3 , ( int ) math . ceil ( math . log ( new size ) ) * 2 ) ; push iterations = math . max ( math . min ( new size , 8 ) , ( int ) math . sqrt ( new size ) / 8 ) ; int [ ] old key table = key table ; int [ ] old value table = value table ; key table = new int [ new size + stash capacity ] ; value table = new int [ new size + stash capacity ] ; int old size = size ; size = has zero value ? 1 : 0 ; stash size = 0 ; if ( old size > 0 ) { for ( int i = 0 ; i < old end index ; i ++ ) { int key = old key table [ i ] ; if ( key != empty ) put resize ( key , old value table [ i ] ) ; } } } <SENTENCE_END/>
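
resize recomputes every derived field from the new power-of-two capacity before rehashing the surviving entries: the growth threshold, the bit mask, the shift used by hash 2 / hash 3, the stash capacity, and how many displacement iterations a put may attempt. The sketch below just evaluates those formulas for one sample capacity; the formulas are copied from the tokenized body, the load factor is illustrative:

    // Sketch: derived sizing fields recomputed by resize, for a sample capacity.
    public class ResizeMath {
        public static void main(String[] args) {
            int newSize = 64;                                   // must be a power of two
            float loadFactor = 0.8f;                            // illustrative value
            int threshold = (int) (newSize * loadFactor);       // when to grow again
            int mask = newSize - 1;                             // for key & mask
            int hashShift = 31 - Integer.numberOfTrailingZeros(newSize);
            int stashCapacity = Math.max(3, (int) Math.ceil(Math.log(newSize)) * 2);
            int pushIterations = Math.max(Math.min(newSize, 8), (int) Math.sqrt(newSize) / 8);
            System.out.printf("threshold=%d mask=%d hashShift=%d stash=%d push=%d%n",
                    threshold, mask, hashShift, stashCapacity, pushIterations);
        }
    }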


Original Name hash,2

hash

<SENTENCE_START> { h *= prime 2 ; return ( h ^ h >>> hash shift ) & mask ; } <SENTENCE_END/>

(Copy Probability: 3.5%)

<SENTENCE_START> { h *= prime 2 ; return ( h ^ h >>> hash shift ) & mask ; } <SENTENCE_END/>

2

<SENTENCE_START> { h *= prime 2 ; return ( h ^ h >>> hash shift ) & mask ; } <SENTENCE_END/>

(Copy Probability: 99.3%)

<SENTENCE_START> { h *= prime 2 ; return ( h ^ h >>> hash shift ) & mask ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { h *= prime 2 ; return ( h ^ h >>> hash shift ) & mask ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { h *= prime 2 ; return ( h ^ h >>> hash shift ) & mask ; } <SENTENCE_END/>


Original Name hash,3

hash

<SENTENCE_START> { h *= prime 3 ; return ( h ^ h >>> hash shift ) & mask ; } <SENTENCE_END/>

(Copy Probability: 4.4%)

<SENTENCE_START> { h *= prime 3 ; return ( h ^ h >>> hash shift ) & mask ; } <SENTENCE_END/>

3

<SENTENCE_START> { h *= prime 3 ; return ( h ^ h >>> hash shift ) & mask ; } <SENTENCE_END/>

(Copy Probability: 99.4%)

<SENTENCE_START> { h *= prime 3 ; return ( h ^ h >>> hash shift ) & mask ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { h *= prime 3 ; return ( h ^ h >>> hash shift ) & mask ; } <SENTENCE_END/>

(Copy Probability: 1.5%)

<SENTENCE_START> { h *= prime 3 ; return ( h ^ h >>> hash shift ) & mask ; } <SENTENCE_END/>
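
hash 2 and hash 3 derive the two alternative bucket indices by multiplying the key with distinct prime constants, folding the high bits down with an unsigned shift by hash shift, and masking to the table size. A self-contained sketch; PRIME2 and PRIME3 here are placeholder constants, not necessarily the ones used by the original class:

    // Sketch: multiplicative hashing with a high-bit fold, as in hash2/hash3.
    public class AltHashSketch {
        static final int PRIME2 = 0xB4B82E39;   // placeholder prime multiplier
        static final int PRIME3 = 0xCED1C241;   // placeholder prime multiplier

        static int hash2(int h, int hashShift, int mask) {
            h *= PRIME2;
            return (h ^ h >>> hashShift) & mask;
        }

        static int hash3(int h, int hashShift, int mask) {
            h *= PRIME3;
            return (h ^ h >>> hashShift) & mask;
        }

        public static void main(String[] args) {
            int capacity = 64, mask = capacity - 1;
            int hashShift = 31 - Integer.numberOfTrailingZeros(capacity);
            System.out.println(hash2(12345, hashShift, mask) + " " + hash3(12345, hashShift, mask));
        }
    }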


Original Name iterator

iterator

<SENTENCE_START> { return entries ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.3%)

<SENTENCE_START> { return entries ( ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return entries ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.3%)

<SENTENCE_START> { return entries ( ) ; } <SENTENCE_END/>


Original Name entries

entries

<SENTENCE_START> { if ( entries 1 == null ) { entries 1 = new entries ( this ) ; entries 2 = new entries ( this ) ; } if ( ! entries 1 . valid ) { entries 1 . reset ( ) ; entries 1 . valid = true ; entries 2 . valid = false ; return entries 1 ; } entries 2 . reset ( ) ; entries 2 . valid = true ; entries 1 . valid = false ; return entries 2 ; } <SENTENCE_END/>

(Copy Probability: 3.1%)

<SENTENCE_START> { if ( entries 1 == null ) { entries 1 = new entries ( this ) ; entries 2 = new entries ( this ) ; } if ( ! entries 1 . valid ) { entries 1 . reset ( ) ; entries 1 . valid = true ; entries 2 . valid = false ; return entries 1 ; } entries 2 . reset ( ) ; entries 2 . valid = true ; entries 1 . valid = false ; return entries 2 ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( entries 1 == null ) { entries 1 = new entries ( this ) ; entries 2 = new entries ( this ) ; } if ( ! entries 1 . valid ) { entries 1 . reset ( ) ; entries 1 . valid = true ; entries 2 . valid = false ; return entries 1 ; } entries 2 . reset ( ) ; entries 2 . valid = true ; entries 1 . valid = false ; return entries 2 ; } <SENTENCE_END/>

(Copy Probability: 2.3%)

<SENTENCE_START> { if ( entries 1 == null ) { entries 1 = new entries ( this ) ; entries 2 = new entries ( this ) ; } if ( ! entries 1 . valid ) { entries 1 . reset ( ) ; entries 1 . valid = true ; entries 2 . valid = false ; return entries 1 ; } entries 2 . reset ( ) ; entries 2 . valid = true ; entries 1 . valid = false ; return entries 2 ; } <SENTENCE_END/>
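
entries (and the values / keys twins below) avoid allocating a new iterator per loop by keeping two reusable instances and handing out whichever one is not currently marked valid; that same valid flag is what later lets has next / next reject nested iteration. A hedged sketch of the double-buffered pooling pattern with an invented ReusableIterator type:

    // Sketch: alternate between two pooled iterator instances instead of allocating each call.
    class ReusableIterator {
        boolean valid;
        void reset() { /* rewind cursor state */ }
    }

    class IteratorPool {
        ReusableIterator iterator1, iterator2;

        ReusableIterator obtain() {
            if (iterator1 == null) {
                iterator1 = new ReusableIterator();
                iterator2 = new ReusableIterator();
            }
            if (!iterator1.valid) {
                iterator1.reset(); iterator1.valid = true; iterator2.valid = false;
                return iterator1;
            }
            iterator2.reset(); iterator2.valid = true; iterator1.valid = false;
            return iterator2;
        }
    }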


Original Name values

values

<SENTENCE_START> { if ( values 1 == null ) { values 1 = new values ( this ) ; values 2 = new values ( this ) ; } if ( ! values 1 . valid ) { values 1 . reset ( ) ; values 1 . valid = true ; values 2 . valid = false ; return values 1 ; } values 2 . reset ( ) ; values 2 . valid = true ; values 1 . valid = false ; return values 2 ; } <SENTENCE_END/>

(Copy Probability: 2.6%)

<SENTENCE_START> { if ( values 1 == null ) { values 1 = new values ( this ) ; values 2 = new values ( this ) ; } if ( ! values 1 . valid ) { values 1 . reset ( ) ; values 1 . valid = true ; values 2 . valid = false ; return values 1 ; } values 2 . reset ( ) ; values 2 . valid = true ; values 1 . valid = false ; return values 2 ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( values 1 == null ) { values 1 = new values ( this ) ; values 2 = new values ( this ) ; } if ( ! values 1 . valid ) { values 1 . reset ( ) ; values 1 . valid = true ; values 2 . valid = false ; return values 1 ; } values 2 . reset ( ) ; values 2 . valid = true ; values 1 . valid = false ; return values 2 ; } <SENTENCE_END/>

(Copy Probability: 2.6%)

<SENTENCE_START> { if ( values 1 == null ) { values 1 = new values ( this ) ; values 2 = new values ( this ) ; } if ( ! values 1 . valid ) { values 1 . reset ( ) ; values 1 . valid = true ; values 2 . valid = false ; return values 1 ; } values 2 . reset ( ) ; values 2 . valid = true ; values 1 . valid = false ; return values 2 ; } <SENTENCE_END/>


Original Name keys

keys

<SENTENCE_START> { if ( keys 1 == null ) { keys 1 = new keys ( this ) ; keys 2 = new keys ( this ) ; } if ( ! keys 1 . valid ) { keys 1 . reset ( ) ; keys 1 . valid = true ; keys 2 . valid = false ; return keys 1 ; } keys 2 . reset ( ) ; keys 2 . valid = true ; keys 1 . valid = false ; return keys 2 ; } <SENTENCE_END/>

(Copy Probability: 3.3%)

<SENTENCE_START> { if ( keys 1 == null ) { keys 1 = new keys ( this ) ; keys 2 = new keys ( this ) ; } if ( ! keys 1 . valid ) { keys 1 . reset ( ) ; keys 1 . valid = true ; keys 2 . valid = false ; return keys 1 ; } keys 2 . reset ( ) ; keys 2 . valid = true ; keys 1 . valid = false ; return keys 2 ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( keys 1 == null ) { keys 1 = new keys ( this ) ; keys 2 = new keys ( this ) ; } if ( ! keys 1 . valid ) { keys 1 . reset ( ) ; keys 1 . valid = true ; keys 2 . valid = false ; return keys 1 ; } keys 2 . reset ( ) ; keys 2 . valid = true ; keys 1 . valid = false ; return keys 2 ; } <SENTENCE_END/>

(Copy Probability: 2.3%)

<SENTENCE_START> { if ( keys 1 == null ) { keys 1 = new keys ( this ) ; keys 2 = new keys ( this ) ; } if ( ! keys 1 . valid ) { keys 1 . reset ( ) ; keys 1 . valid = true ; keys 2 . valid = false ; return keys 1 ; } keys 2 . reset ( ) ; keys 2 . valid = true ; keys 1 . valid = false ; return keys 2 ; } <SENTENCE_END/>


Original Name reset

reset

<SENTENCE_START> { current index = index illegal ; next index = index zero ; if ( map . has zero value ) has next = true ; else find next index ( ) ; } <SENTENCE_END/>

(Copy Probability: 1.3%)

<SENTENCE_START> { current index = index illegal ; next index = index zero ; if ( map . has zero value ) has next = true ; else find next index ( ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { current index = index illegal ; next index = index zero ; if ( map . has zero value ) has next = true ; else find next index ( ) ; } <SENTENCE_END/>

(Copy Probability: 11.9%)

<SENTENCE_START> { current index = index illegal ; next index = index zero ; if ( map . has zero value ) has next = true ; else find next index ( ) ; } <SENTENCE_END/>


Original Name find,next,index

find

<SENTENCE_START> { has next = false ; int [ ] key table = map . key table ; for ( int n = map . capacity + map . stash size ; ++ next index < n ; ) { if ( key table [ next index ] != empty ) { has next = true ; break ; } } } <SENTENCE_END/>

(Copy Probability: 4.0%)

<SENTENCE_START> { has next = false ; int [ ] key table = map . key table ; for ( int n = map . capacity + map . stash size ; ++ next index < n ; ) { if ( key table [ next index ] != empty ) { has next = true ; break ; } } } <SENTENCE_END/>

next

<SENTENCE_START> { has next = false ; int [ ] key table = map . key table ; for ( int n = map . capacity + map . stash size ; ++ next index < n ; ) { if ( key table [ next index ] != empty ) { has next = true ; break ; } } } <SENTENCE_END/>

(Copy Probability: 27.0%)

<SENTENCE_START> { has next = false ; int [ ] key table = map . key table ; for ( int n = map . capacity + map . stash size ; ++ next index < n ; ) { if ( key table [ next index ] != empty ) { has next = true ; break ; } } } <SENTENCE_END/>

index

<SENTENCE_START> { has next = false ; int [ ] key table = map . key table ; for ( int n = map . capacity + map . stash size ; ++ next index < n ; ) { if ( key table [ next index ] != empty ) { has next = true ; break ; } } } <SENTENCE_END/>

(Copy Probability: 8.7%)

<SENTENCE_START> { has next = false ; int [ ] key table = map . key table ; for ( int n = map . capacity + map . stash size ; ++ next index < n ; ) { if ( key table [ next index ] != empty ) { has next = true ; break ; } } } <SENTENCE_END/>

%END%

<SENTENCE_START> { has next = false ; int [ ] key table = map . key table ; for ( int n = map . capacity + map . stash size ; ++ next index < n ; ) { if ( key table [ next index ] != empty ) { has next = true ; break ; } } } <SENTENCE_END/>

(Copy Probability: 4.0%)

<SENTENCE_START> { has next = false ; int [ ] key table = map . key table ; for ( int n = map . capacity + map . stash size ; ++ next index < n ; ) { if ( key table [ next index ] != empty ) { has next = true ; break ; } } } <SENTENCE_END/>
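
reset and find next index drive all three iterators: the cursor starts on a sentinel that stands for the special zero-key slot (emitted first when has zero value is set) and then advances through the key table until it hits a non-EMPTY bucket or runs past capacity + stash size. The sentinel values below are assumptions chosen so the sketch compiles; they mirror how the tokenized bodies use index zero and index illegal:

    // Sketch: cursor advance shared by the Entries/Values/Keys iterators.
    class CursorSketch {
        static final int INDEX_ILLEGAL = -2;   // assumed sentinel: no current element
        static final int INDEX_ZERO = -1;      // assumed sentinel: the zero-key pseudo-slot
        int[] keyTable;
        int capacity, stashSize;
        int nextIndex = INDEX_ZERO;
        boolean hasNext;

        void findNextIndex() {
            hasNext = false;
            for (int n = capacity + stashSize; ++nextIndex < n; ) {
                if (keyTable[nextIndex] != 0) { hasNext = true; break; }   // 0 marks EMPTY
            }
        }
    }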


Original Name remove

remove

<SENTENCE_START> { if ( current index == index zero && map . has zero value ) { map . has zero value = false ; } else if ( current index < 0 ) { throw new illegal state exception ( "next must be called before remove." ) ; } else if ( current index >= map . capacity ) { map . remove stash index ( current index ) ; next index = current index - 1 ; find next index ( ) ; } else { map . key table [ current index ] = empty ; } current index = index illegal ; map . size -- ; } <SENTENCE_END/>

(Copy Probability: 3.4%)

<SENTENCE_START> { if ( current index == index zero && map . has zero value ) { map . has zero value = false ; } else if ( current index < 0 ) { throw new illegal state exception ( "next must be called before remove." ) ; } else if ( current index >= map . capacity ) { map . remove stash index ( current index ) ; next index = current index - 1 ; find next index ( ) ; } else { map . key table [ current index ] = empty ; } current index = index illegal ; map . size -- ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( current index == index zero && map . has zero value ) { map . has zero value = false ; } else if ( current index < 0 ) { throw new illegal state exception ( "next must be called before remove." ) ; } else if ( current index >= map . capacity ) { map . remove stash index ( current index ) ; next index = current index - 1 ; find next index ( ) ; } else { map . key table [ current index ] = empty ; } current index = index illegal ; map . size -- ; } <SENTENCE_END/>

(Copy Probability: 11.2%)

<SENTENCE_START> { if ( current index == index zero && map . has zero value ) { map . has zero value = false ; } else if ( current index < 0 ) { throw new illegal state exception ( "next must be called before remove." ) ; } else if ( current index >= map . capacity ) { map . remove stash index ( current index ) ; next index = current index - 1 ; find next index ( ) ; } else { map . key table [ current index ] = empty ; } current index = index illegal ; map . size -- ; } <SENTENCE_END/>


Original Name next

next

<SENTENCE_START> { if ( ! has next ) throw new no such element exception ( ) ; if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; int [ ] key table = map . key table ; if ( next index == index zero ) { entry . key = 0 ; entry . value = map . zero value ; } else { entry . key = key table [ next index ] ; entry . value = map . value table [ next index ] ; } current index = next index ; find next index ( ) ; return entry ; } <SENTENCE_END/>

(Copy Probability: 2.5%)

<SENTENCE_START> { if ( ! has next ) throw new no such element exception ( ) ; if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; int [ ] key table = map . key table ; if ( next index == index zero ) { entry . key = 0 ; entry . value = map . zero value ; } else { entry . key = key table [ next index ] ; entry . value = map . value table [ next index ] ; } current index = next index ; find next index ( ) ; return entry ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( ! has next ) throw new no such element exception ( ) ; if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; int [ ] key table = map . key table ; if ( next index == index zero ) { entry . key = 0 ; entry . value = map . zero value ; } else { entry . key = key table [ next index ] ; entry . value = map . value table [ next index ] ; } current index = next index ; find next index ( ) ; return entry ; } <SENTENCE_END/>

(Copy Probability: 4.2%)

<SENTENCE_START> { if ( ! has next ) throw new no such element exception ( ) ; if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; int [ ] key table = map . key table ; if ( next index == index zero ) { entry . key = 0 ; entry . value = map . zero value ; } else { entry . key = key table [ next index ] ; entry . value = map . value table [ next index ] ; } current index = next index ; find next index ( ) ; return entry ; } <SENTENCE_END/>
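
This next refills and returns one shared Entry object instead of allocating a key/value pair per element, so a caller must copy the fields out before advancing again. A tiny sketch of that reuse with an invented Entry holder:

    // Sketch: a single mutable Entry is refilled on every next() call.
    class Entry {
        int key, value;
    }

    class EntryReuseSketch {
        final Entry entry = new Entry();   // shared holder, overwritten by each next()

        Entry fill(int key, int value) {
            entry.key = key;
            entry.value = value;
            return entry;                  // same instance every time
        }
    }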


Original Name has,next

has

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>

next

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>


Original Name iterator

iterator

<SENTENCE_START> { return this ; } <SENTENCE_END/>

(Copy Probability: 0.2%)

<SENTENCE_START> { return this ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return this ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { return this ; } <SENTENCE_END/>


Original Name has,next

has

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>

next

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>


Original Name next

next

<SENTENCE_START> { if ( ! has next ) throw new no such element exception ( ) ; if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; int value ; if ( next index == index zero ) value = map . zero value ; else value = map . value table [ next index ] ; current index = next index ; find next index ( ) ; return value ; } <SENTENCE_END/>

(Copy Probability: 2.5%)

<SENTENCE_START> { if ( ! has next ) throw new no such element exception ( ) ; if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; int value ; if ( next index == index zero ) value = map . zero value ; else value = map . value table [ next index ] ; current index = next index ; find next index ( ) ; return value ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( ! has next ) throw new no such element exception ( ) ; if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; int value ; if ( next index == index zero ) value = map . zero value ; else value = map . value table [ next index ] ; current index = next index ; find next index ( ) ; return value ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { if ( ! has next ) throw new no such element exception ( ) ; if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; int value ; if ( next index == index zero ) value = map . zero value ; else value = map . value table [ next index ] ; current index = next index ; find next index ( ) ; return value ; } <SENTENCE_END/>


Original Name to,array

to

<SENTENCE_START> { int array array = new int array ( true , map . size ) ; while ( has next ) array . add ( next ( ) ) ; return array ; } <SENTENCE_END/>

(Copy Probability: 1.5%)

<SENTENCE_START> { int array array = new int array ( true , map . size ) ; while ( has next ) array . add ( next ( ) ) ; return array ; } <SENTENCE_END/>

array

<SENTENCE_START> { int array array = new int array ( true , map . size ) ; while ( has next ) array . add ( next ( ) ) ; return array ; } <SENTENCE_END/>

(Copy Probability: 2.8%)

<SENTENCE_START> { int array array = new int array ( true , map . size ) ; while ( has next ) array . add ( next ( ) ) ; return array ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int array array = new int array ( true , map . size ) ; while ( has next ) array . add ( next ( ) ) ; return array ; } <SENTENCE_END/>

(Copy Probability: 3.3%)

<SENTENCE_START> { int array array = new int array ( true , map . size ) ; while ( has next ) array . add ( next ( ) ) ; return array ; } <SENTENCE_END/>


Original Name has,next

has

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>

next

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; return %SELF% ; } <SENTENCE_END/>


Original Name next

next

<SENTENCE_START> { if ( ! has next ) throw new no such element exception ( ) ; if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; int key = next index == index zero ? 0 : map . key table [ next index ] ; current index = next index ; find next index ( ) ; return key ; } <SENTENCE_END/>

(Copy Probability: 2.4%)

<SENTENCE_START> { if ( ! has next ) throw new no such element exception ( ) ; if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; int key = next index == index zero ? 0 : map . key table [ next index ] ; current index = next index ; find next index ( ) ; return key ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( ! has next ) throw new no such element exception ( ) ; if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; int key = next index == index zero ? 0 : map . key table [ next index ] ; current index = next index ; find next index ( ) ; return key ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { if ( ! has next ) throw new no such element exception ( ) ; if ( ! valid ) throw new gdx runtime exception ( "#iterator() cannot be used nested." ) ; int key = next index == index zero ? 0 : map . key table [ next index ] ; current index = next index ; find next index ( ) ; return key ; } <SENTENCE_END/>


Original Name to,array

to

<SENTENCE_START> { int array array = new int array ( true , map . size ) ; while ( has next ) array . add ( next ( ) ) ; return array ; } <SENTENCE_END/>

(Copy Probability: 1.5%)

<SENTENCE_START> { int array array = new int array ( true , map . size ) ; while ( has next ) array . add ( next ( ) ) ; return array ; } <SENTENCE_END/>

array

<SENTENCE_START> { int array array = new int array ( true , map . size ) ; while ( has next ) array . add ( next ( ) ) ; return array ; } <SENTENCE_END/>

(Copy Probability: 2.8%)

<SENTENCE_START> { int array array = new int array ( true , map . size ) ; while ( has next ) array . add ( next ( ) ) ; return array ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int array array = new int array ( true , map . size ) ; while ( has next ) array . add ( next ( ) ) ; return array ; } <SENTENCE_END/>

(Copy Probability: 3.3%)

<SENTENCE_START> { int array array = new int array ( true , map . size ) ; while ( has next ) array . add ( next ( ) ) ; return array ; } <SENTENCE_END/>


Original Name window,closed

window

<SENTENCE_START> { system . exit ( 0 ) ; } <SENTENCE_END/>

(Copy Probability: 0.3%)

<SENTENCE_START> { system . exit ( 0 ) ; } <SENTENCE_END/>

closed

<SENTENCE_START> { system . exit ( 0 ) ; } <SENTENCE_END/>

(Copy Probability: 0.1%)

<SENTENCE_START> { system . exit ( 0 ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { system . exit ( 0 ) ; } <SENTENCE_END/>

(Copy Probability: 0.1%)

<SENTENCE_START> { system . exit ( 0 ) ; } <SENTENCE_END/>


Original Name reload,rows

reload

<SENTENCE_START> { event queue . invoke later ( new runnable ( ) { public void run ( ) { edit rows panel . remove all ( ) ; add editor row ( new numeric panel ( pixels per meter , "Pixels per meter" , "" ) ) ; add editor row ( new numeric panel ( zoom level , "Zoom level" , "" ) ) ; add editor row ( new numeric panel ( delta multiplier , "Delta multiplier" , "" ) ) ; add editor row ( new gradient panel ( background color , "Background color" , "" , true ) ) ; rows panel . remove all ( ) ; particle emitter emitter = get emitter ( ) ; add row ( new image panel ( particle editor . this , "Image" , "" ) ) ; add row ( new count panel ( particle editor . this , "Count" , "Min number of particles at all times, max number of particles allowed." ) ) ; add row ( new ranged numeric panel ( emitter . get delay ( ) , "Delay" , "Time from beginning of effect to emission start, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get duration ( ) , "Duration" , "Time particles will be emitted, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get emission ( ) , "Duration" , "Emission" , "Number of particles emitted per second." ) ) ; add row ( new scaled numeric panel ( emitter . get life ( ) , "Duration" , "Life" , "Time particles will live, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get life offset ( ) , "Duration" , "Life Offset" , "Particle starting life consumed, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get x offset value ( ) , "X Offset" , "Amount to offset a particle's starting X location, in world units." ) ) ; add row ( new ranged numeric panel ( emitter . get y offset value ( ) , "Y Offset" , "Amount to offset a particle's starting Y location, in world units." ) ) ; add row ( new spawn panel ( particle editor . this , emitter . get spawn shape ( ) , "Spawn" , "Shape used to spawn particles." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn width ( ) , "Duration" , "Spawn Width" , "Width of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn height ( ) , "Duration" , "Spawn Height" , "Height of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get scale ( ) , "Life" , "Size" , "Particle size, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get velocity ( ) , "Life" , "Velocity" , "Particle speed, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get angle ( ) , "Life" , "Angle" , "Particle emission angle, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get rotation ( ) , "Life" , "Rotation" , "Particle rotation, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get wind ( ) , "Life" , "Wind" , "Wind strength, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get gravity ( ) , "Life" , "Gravity" , "Gravity strength, in world units per second." ) ) ; add row ( new gradient panel ( emitter . get tint ( ) , "Tint" , "" , false ) ) ; add row ( new percentage panel ( emitter . get transparency ( ) , "Life" , "Transparency" , "" ) ) ; add row ( new options panel ( particle editor . this , "Options" , "" ) ) ; for ( component component : rows panel . get components ( ) ) if ( component instanceof editor panel ) ( ( editor panel ) component ) . update ( particle editor . this ) ; rows panel . repaint ( ) ; } } ) ; } <SENTENCE_END/>

(Copy Probability: 4.2%)

<SENTENCE_START> { event queue . invoke later ( new runnable ( ) { public void run ( ) { edit rows panel . remove all ( ) ; add editor row ( new numeric panel ( pixels per meter , "Pixels per meter" , "" ) ) ; add editor row ( new numeric panel ( zoom level , "Zoom level" , "" ) ) ; add editor row ( new numeric panel ( delta multiplier , "Delta multiplier" , "" ) ) ; add editor row ( new gradient panel ( background color , "Background color" , "" , true ) ) ; rows panel . remove all ( ) ; particle emitter emitter = get emitter ( ) ; add row ( new image panel ( particle editor . this , "Image" , "" ) ) ; add row ( new count panel ( particle editor . this , "Count" , "Min number of particles at all times, max number of particles allowed." ) ) ; add row ( new ranged numeric panel ( emitter . get delay ( ) , "Delay" , "Time from beginning of effect to emission start, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get duration ( ) , "Duration" , "Time particles will be emitted, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get emission ( ) , "Duration" , "Emission" , "Number of particles emitted per second." ) ) ; add row ( new scaled numeric panel ( emitter . get life ( ) , "Duration" , "Life" , "Time particles will live, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get life offset ( ) , "Duration" , "Life Offset" , "Particle starting life consumed, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get x offset value ( ) , "X Offset" , "Amount to offset a particle's starting X location, in world units." ) ) ; add row ( new ranged numeric panel ( emitter . get y offset value ( ) , "Y Offset" , "Amount to offset a particle's starting Y location, in world units." ) ) ; add row ( new spawn panel ( particle editor . this , emitter . get spawn shape ( ) , "Spawn" , "Shape used to spawn particles." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn width ( ) , "Duration" , "Spawn Width" , "Width of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn height ( ) , "Duration" , "Spawn Height" , "Height of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get scale ( ) , "Life" , "Size" , "Particle size, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get velocity ( ) , "Life" , "Velocity" , "Particle speed, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get angle ( ) , "Life" , "Angle" , "Particle emission angle, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get rotation ( ) , "Life" , "Rotation" , "Particle rotation, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get wind ( ) , "Life" , "Wind" , "Wind strength, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get gravity ( ) , "Life" , "Gravity" , "Gravity strength, in world units per second." ) ) ; add row ( new gradient panel ( emitter . get tint ( ) , "Tint" , "" , false ) ) ; add row ( new percentage panel ( emitter . get transparency ( ) , "Life" , "Transparency" , "" ) ) ; add row ( new options panel ( particle editor . this , "Options" , "" ) ) ; for ( component component : rows panel . get components ( ) ) if ( component instanceof editor panel ) ( ( editor panel ) component ) . update ( particle editor . this ) ; rows panel . repaint ( ) ; } } ) ; } <SENTENCE_END/>

rows

<SENTENCE_START> { event queue . invoke later ( new runnable ( ) { public void run ( ) { edit rows panel . remove all ( ) ; add editor row ( new numeric panel ( pixels per meter , "Pixels per meter" , "" ) ) ; add editor row ( new numeric panel ( zoom level , "Zoom level" , "" ) ) ; add editor row ( new numeric panel ( delta multiplier , "Delta multiplier" , "" ) ) ; add editor row ( new gradient panel ( background color , "Background color" , "" , true ) ) ; rows panel . remove all ( ) ; particle emitter emitter = get emitter ( ) ; add row ( new image panel ( particle editor . this , "Image" , "" ) ) ; add row ( new count panel ( particle editor . this , "Count" , "Min number of particles at all times, max number of particles allowed." ) ) ; add row ( new ranged numeric panel ( emitter . get delay ( ) , "Delay" , "Time from beginning of effect to emission start, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get duration ( ) , "Duration" , "Time particles will be emitted, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get emission ( ) , "Duration" , "Emission" , "Number of particles emitted per second." ) ) ; add row ( new scaled numeric panel ( emitter . get life ( ) , "Duration" , "Life" , "Time particles will live, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get life offset ( ) , "Duration" , "Life Offset" , "Particle starting life consumed, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get x offset value ( ) , "X Offset" , "Amount to offset a particle's starting X location, in world units." ) ) ; add row ( new ranged numeric panel ( emitter . get y offset value ( ) , "Y Offset" , "Amount to offset a particle's starting Y location, in world units." ) ) ; add row ( new spawn panel ( particle editor . this , emitter . get spawn shape ( ) , "Spawn" , "Shape used to spawn particles." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn width ( ) , "Duration" , "Spawn Width" , "Width of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn height ( ) , "Duration" , "Spawn Height" , "Height of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get scale ( ) , "Life" , "Size" , "Particle size, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get velocity ( ) , "Life" , "Velocity" , "Particle speed, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get angle ( ) , "Life" , "Angle" , "Particle emission angle, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get rotation ( ) , "Life" , "Rotation" , "Particle rotation, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get wind ( ) , "Life" , "Wind" , "Wind strength, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get gravity ( ) , "Life" , "Gravity" , "Gravity strength, in world units per second." ) ) ; add row ( new gradient panel ( emitter . get tint ( ) , "Tint" , "" , false ) ) ; add row ( new percentage panel ( emitter . get transparency ( ) , "Life" , "Transparency" , "" ) ) ; add row ( new options panel ( particle editor . this , "Options" , "" ) ) ; for ( component component : rows panel . get components ( ) ) if ( component instanceof editor panel ) ( ( editor panel ) component ) . update ( particle editor . this ) ; rows panel . repaint ( ) ; } } ) ; } <SENTENCE_END/>

(Copy Probability: 13.5%)

<SENTENCE_START> { event queue . invoke later ( new runnable ( ) { public void run ( ) { edit rows panel . remove all ( ) ; add editor row ( new numeric panel ( pixels per meter , "Pixels per meter" , "" ) ) ; add editor row ( new numeric panel ( zoom level , "Zoom level" , "" ) ) ; add editor row ( new numeric panel ( delta multiplier , "Delta multiplier" , "" ) ) ; add editor row ( new gradient panel ( background color , "Background color" , "" , true ) ) ; rows panel . remove all ( ) ; particle emitter emitter = get emitter ( ) ; add row ( new image panel ( particle editor . this , "Image" , "" ) ) ; add row ( new count panel ( particle editor . this , "Count" , "Min number of particles at all times, max number of particles allowed." ) ) ; add row ( new ranged numeric panel ( emitter . get delay ( ) , "Delay" , "Time from beginning of effect to emission start, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get duration ( ) , "Duration" , "Time particles will be emitted, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get emission ( ) , "Duration" , "Emission" , "Number of particles emitted per second." ) ) ; add row ( new scaled numeric panel ( emitter . get life ( ) , "Duration" , "Life" , "Time particles will live, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get life offset ( ) , "Duration" , "Life Offset" , "Particle starting life consumed, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get x offset value ( ) , "X Offset" , "Amount to offset a particle's starting X location, in world units." ) ) ; add row ( new ranged numeric panel ( emitter . get y offset value ( ) , "Y Offset" , "Amount to offset a particle's starting Y location, in world units." ) ) ; add row ( new spawn panel ( particle editor . this , emitter . get spawn shape ( ) , "Spawn" , "Shape used to spawn particles." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn width ( ) , "Duration" , "Spawn Width" , "Width of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn height ( ) , "Duration" , "Spawn Height" , "Height of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get scale ( ) , "Life" , "Size" , "Particle size, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get velocity ( ) , "Life" , "Velocity" , "Particle speed, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get angle ( ) , "Life" , "Angle" , "Particle emission angle, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get rotation ( ) , "Life" , "Rotation" , "Particle rotation, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get wind ( ) , "Life" , "Wind" , "Wind strength, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get gravity ( ) , "Life" , "Gravity" , "Gravity strength, in world units per second." ) ) ; add row ( new gradient panel ( emitter . get tint ( ) , "Tint" , "" , false ) ) ; add row ( new percentage panel ( emitter . get transparency ( ) , "Life" , "Transparency" , "" ) ) ; add row ( new options panel ( particle editor . this , "Options" , "" ) ) ; for ( component component : rows panel . get components ( ) ) if ( component instanceof editor panel ) ( ( editor panel ) component ) . update ( particle editor . this ) ; rows panel . repaint ( ) ; } } ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { event queue . invoke later ( new runnable ( ) { public void run ( ) { edit rows panel . remove all ( ) ; add editor row ( new numeric panel ( pixels per meter , "Pixels per meter" , "" ) ) ; add editor row ( new numeric panel ( zoom level , "Zoom level" , "" ) ) ; add editor row ( new numeric panel ( delta multiplier , "Delta multiplier" , "" ) ) ; add editor row ( new gradient panel ( background color , "Background color" , "" , true ) ) ; rows panel . remove all ( ) ; particle emitter emitter = get emitter ( ) ; add row ( new image panel ( particle editor . this , "Image" , "" ) ) ; add row ( new count panel ( particle editor . this , "Count" , "Min number of particles at all times, max number of particles allowed." ) ) ; add row ( new ranged numeric panel ( emitter . get delay ( ) , "Delay" , "Time from beginning of effect to emission start, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get duration ( ) , "Duration" , "Time particles will be emitted, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get emission ( ) , "Duration" , "Emission" , "Number of particles emitted per second." ) ) ; add row ( new scaled numeric panel ( emitter . get life ( ) , "Duration" , "Life" , "Time particles will live, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get life offset ( ) , "Duration" , "Life Offset" , "Particle starting life consumed, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get x offset value ( ) , "X Offset" , "Amount to offset a particle's starting X location, in world units." ) ) ; add row ( new ranged numeric panel ( emitter . get y offset value ( ) , "Y Offset" , "Amount to offset a particle's starting Y location, in world units." ) ) ; add row ( new spawn panel ( particle editor . this , emitter . get spawn shape ( ) , "Spawn" , "Shape used to spawn particles." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn width ( ) , "Duration" , "Spawn Width" , "Width of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn height ( ) , "Duration" , "Spawn Height" , "Height of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get scale ( ) , "Life" , "Size" , "Particle size, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get velocity ( ) , "Life" , "Velocity" , "Particle speed, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get angle ( ) , "Life" , "Angle" , "Particle emission angle, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get rotation ( ) , "Life" , "Rotation" , "Particle rotation, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get wind ( ) , "Life" , "Wind" , "Wind strength, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get gravity ( ) , "Life" , "Gravity" , "Gravity strength, in world units per second." ) ) ; add row ( new gradient panel ( emitter . get tint ( ) , "Tint" , "" , false ) ) ; add row ( new percentage panel ( emitter . get transparency ( ) , "Life" , "Transparency" , "" ) ) ; add row ( new options panel ( particle editor . this , "Options" , "" ) ) ; for ( component component : rows panel . get components ( ) ) if ( component instanceof editor panel ) ( ( editor panel ) component ) . update ( particle editor . this ) ; rows panel . repaint ( ) ; } } ) ; } <SENTENCE_END/>

(Copy Probability: 6.1%)

<SENTENCE_START> { event queue . invoke later ( new runnable ( ) { public void run ( ) { edit rows panel . remove all ( ) ; add editor row ( new numeric panel ( pixels per meter , "Pixels per meter" , "" ) ) ; add editor row ( new numeric panel ( zoom level , "Zoom level" , "" ) ) ; add editor row ( new numeric panel ( delta multiplier , "Delta multiplier" , "" ) ) ; add editor row ( new gradient panel ( background color , "Background color" , "" , true ) ) ; rows panel . remove all ( ) ; particle emitter emitter = get emitter ( ) ; add row ( new image panel ( particle editor . this , "Image" , "" ) ) ; add row ( new count panel ( particle editor . this , "Count" , "Min number of particles at all times, max number of particles allowed." ) ) ; add row ( new ranged numeric panel ( emitter . get delay ( ) , "Delay" , "Time from beginning of effect to emission start, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get duration ( ) , "Duration" , "Time particles will be emitted, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get emission ( ) , "Duration" , "Emission" , "Number of particles emitted per second." ) ) ; add row ( new scaled numeric panel ( emitter . get life ( ) , "Duration" , "Life" , "Time particles will live, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get life offset ( ) , "Duration" , "Life Offset" , "Particle starting life consumed, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get x offset value ( ) , "X Offset" , "Amount to offset a particle's starting X location, in world units." ) ) ; add row ( new ranged numeric panel ( emitter . get y offset value ( ) , "Y Offset" , "Amount to offset a particle's starting Y location, in world units." ) ) ; add row ( new spawn panel ( particle editor . this , emitter . get spawn shape ( ) , "Spawn" , "Shape used to spawn particles." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn width ( ) , "Duration" , "Spawn Width" , "Width of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn height ( ) , "Duration" , "Spawn Height" , "Height of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get scale ( ) , "Life" , "Size" , "Particle size, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get velocity ( ) , "Life" , "Velocity" , "Particle speed, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get angle ( ) , "Life" , "Angle" , "Particle emission angle, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get rotation ( ) , "Life" , "Rotation" , "Particle rotation, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get wind ( ) , "Life" , "Wind" , "Wind strength, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get gravity ( ) , "Life" , "Gravity" , "Gravity strength, in world units per second." ) ) ; add row ( new gradient panel ( emitter . get tint ( ) , "Tint" , "" , false ) ) ; add row ( new percentage panel ( emitter . get transparency ( ) , "Life" , "Transparency" , "" ) ) ; add row ( new options panel ( particle editor . this , "Options" , "" ) ) ; for ( component component : rows panel . get components ( ) ) if ( component instanceof editor panel ) ( ( editor panel ) component ) . update ( particle editor . this ) ; rows panel . repaint ( ) ; } } ) ; } <SENTENCE_END/>


Original Name run

run

<SENTENCE_START> { edit rows panel . remove all ( ) ; add editor row ( new numeric panel ( pixels per meter , "Pixels per meter" , "" ) ) ; add editor row ( new numeric panel ( zoom level , "Zoom level" , "" ) ) ; add editor row ( new numeric panel ( delta multiplier , "Delta multiplier" , "" ) ) ; add editor row ( new gradient panel ( background color , "Background color" , "" , true ) ) ; rows panel . remove all ( ) ; particle emitter emitter = get emitter ( ) ; add row ( new image panel ( particle editor . this , "Image" , "" ) ) ; add row ( new count panel ( particle editor . this , "Count" , "Min number of particles at all times, max number of particles allowed." ) ) ; add row ( new ranged numeric panel ( emitter . get delay ( ) , "Delay" , "Time from beginning of effect to emission start, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get duration ( ) , "Duration" , "Time particles will be emitted, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get emission ( ) , "Duration" , "Emission" , "Number of particles emitted per second." ) ) ; add row ( new scaled numeric panel ( emitter . get life ( ) , "Duration" , "Life" , "Time particles will live, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get life offset ( ) , "Duration" , "Life Offset" , "Particle starting life consumed, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get x offset value ( ) , "X Offset" , "Amount to offset a particle's starting X location, in world units." ) ) ; add row ( new ranged numeric panel ( emitter . get y offset value ( ) , "Y Offset" , "Amount to offset a particle's starting Y location, in world units." ) ) ; add row ( new spawn panel ( particle editor . this , emitter . get spawn shape ( ) , "Spawn" , "Shape used to spawn particles." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn width ( ) , "Duration" , "Spawn Width" , "Width of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn height ( ) , "Duration" , "Spawn Height" , "Height of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get scale ( ) , "Life" , "Size" , "Particle size, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get velocity ( ) , "Life" , "Velocity" , "Particle speed, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get angle ( ) , "Life" , "Angle" , "Particle emission angle, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get rotation ( ) , "Life" , "Rotation" , "Particle rotation, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get wind ( ) , "Life" , "Wind" , "Wind strength, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get gravity ( ) , "Life" , "Gravity" , "Gravity strength, in world units per second." ) ) ; add row ( new gradient panel ( emitter . get tint ( ) , "Tint" , "" , false ) ) ; add row ( new percentage panel ( emitter . get transparency ( ) , "Life" , "Transparency" , "" ) ) ; add row ( new options panel ( particle editor . this , "Options" , "" ) ) ; for ( component component : rows panel . get components ( ) ) if ( component instanceof editor panel ) ( ( editor panel ) component ) . update ( particle editor . this ) ; rows panel . repaint ( ) ; } <SENTENCE_END/>

(Copy Probability: 4.2%)

<SENTENCE_START> { edit rows panel . remove all ( ) ; add editor row ( new numeric panel ( pixels per meter , "Pixels per meter" , "" ) ) ; add editor row ( new numeric panel ( zoom level , "Zoom level" , "" ) ) ; add editor row ( new numeric panel ( delta multiplier , "Delta multiplier" , "" ) ) ; add editor row ( new gradient panel ( background color , "Background color" , "" , true ) ) ; rows panel . remove all ( ) ; particle emitter emitter = get emitter ( ) ; add row ( new image panel ( particle editor . this , "Image" , "" ) ) ; add row ( new count panel ( particle editor . this , "Count" , "Min number of particles at all times, max number of particles allowed." ) ) ; add row ( new ranged numeric panel ( emitter . get delay ( ) , "Delay" , "Time from beginning of effect to emission start, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get duration ( ) , "Duration" , "Time particles will be emitted, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get emission ( ) , "Duration" , "Emission" , "Number of particles emitted per second." ) ) ; add row ( new scaled numeric panel ( emitter . get life ( ) , "Duration" , "Life" , "Time particles will live, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get life offset ( ) , "Duration" , "Life Offset" , "Particle starting life consumed, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get x offset value ( ) , "X Offset" , "Amount to offset a particle's starting X location, in world units." ) ) ; add row ( new ranged numeric panel ( emitter . get y offset value ( ) , "Y Offset" , "Amount to offset a particle's starting Y location, in world units." ) ) ; add row ( new spawn panel ( particle editor . this , emitter . get spawn shape ( ) , "Spawn" , "Shape used to spawn particles." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn width ( ) , "Duration" , "Spawn Width" , "Width of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn height ( ) , "Duration" , "Spawn Height" , "Height of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get scale ( ) , "Life" , "Size" , "Particle size, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get velocity ( ) , "Life" , "Velocity" , "Particle speed, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get angle ( ) , "Life" , "Angle" , "Particle emission angle, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get rotation ( ) , "Life" , "Rotation" , "Particle rotation, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get wind ( ) , "Life" , "Wind" , "Wind strength, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get gravity ( ) , "Life" , "Gravity" , "Gravity strength, in world units per second." ) ) ; add row ( new gradient panel ( emitter . get tint ( ) , "Tint" , "" , false ) ) ; add row ( new percentage panel ( emitter . get transparency ( ) , "Life" , "Transparency" , "" ) ) ; add row ( new options panel ( particle editor . this , "Options" , "" ) ) ; for ( component component : rows panel . get components ( ) ) if ( component instanceof editor panel ) ( ( editor panel ) component ) . update ( particle editor . this ) ; rows panel . repaint ( ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { edit rows panel . remove all ( ) ; add editor row ( new numeric panel ( pixels per meter , "Pixels per meter" , "" ) ) ; add editor row ( new numeric panel ( zoom level , "Zoom level" , "" ) ) ; add editor row ( new numeric panel ( delta multiplier , "Delta multiplier" , "" ) ) ; add editor row ( new gradient panel ( background color , "Background color" , "" , true ) ) ; rows panel . remove all ( ) ; particle emitter emitter = get emitter ( ) ; add row ( new image panel ( particle editor . this , "Image" , "" ) ) ; add row ( new count panel ( particle editor . this , "Count" , "Min number of particles at all times, max number of particles allowed." ) ) ; add row ( new ranged numeric panel ( emitter . get delay ( ) , "Delay" , "Time from beginning of effect to emission start, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get duration ( ) , "Duration" , "Time particles will be emitted, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get emission ( ) , "Duration" , "Emission" , "Number of particles emitted per second." ) ) ; add row ( new scaled numeric panel ( emitter . get life ( ) , "Duration" , "Life" , "Time particles will live, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get life offset ( ) , "Duration" , "Life Offset" , "Particle starting life consumed, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get x offset value ( ) , "X Offset" , "Amount to offset a particle's starting X location, in world units." ) ) ; add row ( new ranged numeric panel ( emitter . get y offset value ( ) , "Y Offset" , "Amount to offset a particle's starting Y location, in world units." ) ) ; add row ( new spawn panel ( particle editor . this , emitter . get spawn shape ( ) , "Spawn" , "Shape used to spawn particles." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn width ( ) , "Duration" , "Spawn Width" , "Width of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn height ( ) , "Duration" , "Spawn Height" , "Height of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get scale ( ) , "Life" , "Size" , "Particle size, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get velocity ( ) , "Life" , "Velocity" , "Particle speed, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get angle ( ) , "Life" , "Angle" , "Particle emission angle, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get rotation ( ) , "Life" , "Rotation" , "Particle rotation, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get wind ( ) , "Life" , "Wind" , "Wind strength, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get gravity ( ) , "Life" , "Gravity" , "Gravity strength, in world units per second." ) ) ; add row ( new gradient panel ( emitter . get tint ( ) , "Tint" , "" , false ) ) ; add row ( new percentage panel ( emitter . get transparency ( ) , "Life" , "Transparency" , "" ) ) ; add row ( new options panel ( particle editor . this , "Options" , "" ) ) ; for ( component component : rows panel . get components ( ) ) if ( component instanceof editor panel ) ( ( editor panel ) component ) . update ( particle editor . this ) ; rows panel . repaint ( ) ; } <SENTENCE_END/>

(Copy Probability: 15.1%)

<SENTENCE_START> { edit rows panel . remove all ( ) ; add editor row ( new numeric panel ( pixels per meter , "Pixels per meter" , "" ) ) ; add editor row ( new numeric panel ( zoom level , "Zoom level" , "" ) ) ; add editor row ( new numeric panel ( delta multiplier , "Delta multiplier" , "" ) ) ; add editor row ( new gradient panel ( background color , "Background color" , "" , true ) ) ; rows panel . remove all ( ) ; particle emitter emitter = get emitter ( ) ; add row ( new image panel ( particle editor . this , "Image" , "" ) ) ; add row ( new count panel ( particle editor . this , "Count" , "Min number of particles at all times, max number of particles allowed." ) ) ; add row ( new ranged numeric panel ( emitter . get delay ( ) , "Delay" , "Time from beginning of effect to emission start, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get duration ( ) , "Duration" , "Time particles will be emitted, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get emission ( ) , "Duration" , "Emission" , "Number of particles emitted per second." ) ) ; add row ( new scaled numeric panel ( emitter . get life ( ) , "Duration" , "Life" , "Time particles will live, in milliseconds." ) ) ; add row ( new scaled numeric panel ( emitter . get life offset ( ) , "Duration" , "Life Offset" , "Particle starting life consumed, in milliseconds." ) ) ; add row ( new ranged numeric panel ( emitter . get x offset value ( ) , "X Offset" , "Amount to offset a particle's starting X location, in world units." ) ) ; add row ( new ranged numeric panel ( emitter . get y offset value ( ) , "Y Offset" , "Amount to offset a particle's starting Y location, in world units." ) ) ; add row ( new spawn panel ( particle editor . this , emitter . get spawn shape ( ) , "Spawn" , "Shape used to spawn particles." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn width ( ) , "Duration" , "Spawn Width" , "Width of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get spawn height ( ) , "Duration" , "Spawn Height" , "Height of the spawn shape, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get scale ( ) , "Life" , "Size" , "Particle size, in world units." ) ) ; add row ( new scaled numeric panel ( emitter . get velocity ( ) , "Life" , "Velocity" , "Particle speed, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get angle ( ) , "Life" , "Angle" , "Particle emission angle, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get rotation ( ) , "Life" , "Rotation" , "Particle rotation, in degrees." ) ) ; add row ( new scaled numeric panel ( emitter . get wind ( ) , "Life" , "Wind" , "Wind strength, in world units per second." ) ) ; add row ( new scaled numeric panel ( emitter . get gravity ( ) , "Life" , "Gravity" , "Gravity strength, in world units per second." ) ) ; add row ( new gradient panel ( emitter . get tint ( ) , "Tint" , "" , false ) ) ; add row ( new percentage panel ( emitter . get transparency ( ) , "Life" , "Transparency" , "" ) ) ; add row ( new options panel ( particle editor . this , "Options" , "" ) ) ; for ( component component : rows panel . get components ( ) ) if ( component instanceof editor panel ) ( ( editor panel ) component ) . update ( particle editor . this ) ; rows panel . repaint ( ) ; } <SENTENCE_END/>
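
For readability: the two entries above (the EventQueue.invokeLater wrapper and its inner run()) share one body. A plausible camel-cased Java reading, abridged, with identifiers inferred from the tokens rather than taken from any source file:

    // Rebuild the editor-properties rows, then the per-emitter rows.
    editRowsPanel.removeAll();
    addEditorRow(new NumericPanel(pixelsPerMeter, "Pixels per meter", ""));
    addEditorRow(new NumericPanel(zoomLevel, "Zoom level", ""));
    addEditorRow(new NumericPanel(deltaMultiplier, "Delta multiplier", ""));
    addEditorRow(new GradientPanel(backgroundColor, "Background color", "", true));

    rowsPanel.removeAll();
    ParticleEmitter emitter = getEmitter();
    addRow(new ImagePanel(ParticleEditor.this, "Image", ""));
    addRow(new CountPanel(ParticleEditor.this, "Count",
        "Min number of particles at all times, max number of particles allowed."));
    addRow(new RangedNumericPanel(emitter.getDelay(), "Delay",
        "Time from beginning of effect to emission start, in milliseconds."));
    // ... analogous addRow calls for Duration, Emission, Life, Life Offset, X/Y Offset,
    // Spawn, Spawn Width/Height, Size, Velocity, Angle, Rotation, Wind, Gravity,
    // Tint, Transparency and Options, exactly as listed in the tokens above ...

    // Push the current editor state into every row, then repaint the container.
    for (Component component : rowsPanel.getComponents())
        if (component instanceof EditorPanel) ((EditorPanel) component).update(ParticleEditor.this);
    rowsPanel.repaint();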


Original Name add,editor,row

add

<SENTENCE_START> { row . set border ( border factory . create matte border ( 0 , 0 , 1 , 0 , java . awt . color . black ) ) ; edit rows panel . add ( row , new grid bag constraints ( 0 , - 1 , 1 , 1 , 1 , 0 , grid bag constraints . center , grid bag constraints . horizontal , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; } <SENTENCE_END/>

(Copy Probability: 5.3%)

<SENTENCE_START> { row . set border ( border factory . create matte border ( 0 , 0 , 1 , 0 , java . awt . color . black ) ) ; edit rows panel . add ( row , new grid bag constraints ( 0 , - 1 , 1 , 1 , 1 , 0 , grid bag constraints . center , grid bag constraints . horizontal , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; } <SENTENCE_END/>

editor

<SENTENCE_START> { row . set border ( border factory . create matte border ( 0 , 0 , 1 , 0 , java . awt . color . black ) ) ; edit rows panel . add ( row , new grid bag constraints ( 0 , - 1 , 1 , 1 , 1 , 0 , grid bag constraints . center , grid bag constraints . horizontal , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; } <SENTENCE_END/>

(Copy Probability: 15.4%)

<SENTENCE_START> { row . set border ( border factory . create matte border ( 0 , 0 , 1 , 0 , java . awt . color . black ) ) ; edit rows panel . add ( row , new grid bag constraints ( 0 , - 1 , 1 , 1 , 1 , 0 , grid bag constraints . center , grid bag constraints . horizontal , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; } <SENTENCE_END/>

row

<SENTENCE_START> { row . set border ( border factory . create matte border ( 0 , 0 , 1 , 0 , java . awt . color . black ) ) ; edit rows panel . add ( row , new grid bag constraints ( 0 , - 1 , 1 , 1 , 1 , 0 , grid bag constraints . center , grid bag constraints . horizontal , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; } <SENTENCE_END/>

(Copy Probability: 6.6%)

<SENTENCE_START> { row . set border ( border factory . create matte border ( 0 , 0 , 1 , 0 , java . awt . color . black ) ) ; edit rows panel . add ( row , new grid bag constraints ( 0 , - 1 , 1 , 1 , 1 , 0 , grid bag constraints . center , grid bag constraints . horizontal , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { row . set border ( border factory . create matte border ( 0 , 0 , 1 , 0 , java . awt . color . black ) ) ; edit rows panel . add ( row , new grid bag constraints ( 0 , - 1 , 1 , 1 , 1 , 0 , grid bag constraints . center , grid bag constraints . horizontal , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; } <SENTENCE_END/>

(Copy Probability: 3.4%)

<SENTENCE_START> { row . set border ( border factory . create matte border ( 0 , 0 , 1 , 0 , java . awt . color . black ) ) ; edit rows panel . add ( row , new grid bag constraints ( 0 , - 1 , 1 , 1 , 1 , 0 , grid bag constraints . center , grid bag constraints . horizontal , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; } <SENTENCE_END/>


Original Name add,row

add

<SENTENCE_START> { row . set border ( border factory . create matte border ( 0 , 0 , 1 , 0 , java . awt . color . black ) ) ; rows panel . add ( row , new grid bag constraints ( 0 , - 1 , 1 , 1 , 1 , 0 , grid bag constraints . center , grid bag constraints . horizontal , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; } <SENTENCE_END/>

(Copy Probability: 5.8%)

<SENTENCE_START> { row . set border ( border factory . create matte border ( 0 , 0 , 1 , 0 , java . awt . color . black ) ) ; rows panel . add ( row , new grid bag constraints ( 0 , - 1 , 1 , 1 , 1 , 0 , grid bag constraints . center , grid bag constraints . horizontal , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; } <SENTENCE_END/>

row

<SENTENCE_START> { row . set border ( border factory . create matte border ( 0 , 0 , 1 , 0 , java . awt . color . black ) ) ; rows panel . add ( row , new grid bag constraints ( 0 , - 1 , 1 , 1 , 1 , 0 , grid bag constraints . center , grid bag constraints . horizontal , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; } <SENTENCE_END/>

(Copy Probability: 22.7%)

<SENTENCE_START> { row . set border ( border factory . create matte border ( 0 , 0 , 1 , 0 , java . awt . color . black ) ) ; rows panel . add ( row , new grid bag constraints ( 0 , - 1 , 1 , 1 , 1 , 0 , grid bag constraints . center , grid bag constraints . horizontal , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { row . set border ( border factory . create matte border ( 0 , 0 , 1 , 0 , java . awt . color . black ) ) ; rows panel . add ( row , new grid bag constraints ( 0 , - 1 , 1 , 1 , 1 , 0 , grid bag constraints . center , grid bag constraints . horizontal , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; } <SENTENCE_END/>

(Copy Probability: 6.8%)

<SENTENCE_START> { row . set border ( border factory . create matte border ( 0 , 0 , 1 , 0 , java . awt . color . black ) ) ; rows panel . add ( row , new grid bag constraints ( 0 , - 1 , 1 , 1 , 1 , 0 , grid bag constraints . center , grid bag constraints . horizontal , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; } <SENTENCE_END/>
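
The two helpers above (add,editor,row and add,row) differ only in the container they target. A plausible Java reading; the JPanel parameter type and the method signatures are inferred, not given by the tokens:

    void addEditorRow (JPanel row) {
        row.setBorder(BorderFactory.createMatteBorder(0, 0, 1, 0, java.awt.Color.black));
        editRowsPanel.add(row, new GridBagConstraints(0, -1, 1, 1, 1, 0, GridBagConstraints.CENTER,
            GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
    }

    void addRow (JPanel row) {
        row.setBorder(BorderFactory.createMatteBorder(0, 0, 1, 0, java.awt.Color.black));
        rowsPanel.add(row, new GridBagConstraints(0, -1, 1, 1, 1, 0, GridBagConstraints.CENTER,
            GridBagConstraints.HORIZONTAL, new Insets(0, 0, 0, 0), 0, 0));
    }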


Original Name set,visible

set

<SENTENCE_START> { for ( component component : rows panel . get components ( ) ) if ( component instanceof editor panel && ( ( editor panel ) component ) . get name ( ) . equals ( name ) ) component . %SELF% ( visible ) ; } <SENTENCE_END/>

(Copy Probability: 2.0%)

<SENTENCE_START> { for ( component component : rows panel . get components ( ) ) if ( component instanceof editor panel && ( ( editor panel ) component ) . get name ( ) . equals ( name ) ) component . %SELF% ( visible ) ; } <SENTENCE_END/>

visible

<SENTENCE_START> { for ( component component : rows panel . get components ( ) ) if ( component instanceof editor panel && ( ( editor panel ) component ) . get name ( ) . equals ( name ) ) component . %SELF% ( visible ) ; } <SENTENCE_END/>

(Copy Probability: 19.1%)

<SENTENCE_START> { for ( component component : rows panel . get components ( ) ) if ( component instanceof editor panel && ( ( editor panel ) component ) . get name ( ) . equals ( name ) ) component . %SELF% ( visible ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { for ( component component : rows panel . get components ( ) ) if ( component instanceof editor panel && ( ( editor panel ) component ) . get name ( ) . equals ( name ) ) component . %SELF% ( visible ) ; } <SENTENCE_END/>

(Copy Probability: 11.7%)

<SENTENCE_START> { for ( component component : rows panel . get components ( ) ) if ( component instanceof editor panel && ( ( editor panel ) component ) . get name ( ) . equals ( name ) ) component . %SELF% ( visible ) ; } <SENTENCE_END/>
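
A plausible Java reading of the body above; the name/visible parameters are inferred from the tokens:

    void setVisible (String name, boolean visible) {
        for (Component component : rowsPanel.getComponents())
            if (component instanceof EditorPanel && ((EditorPanel) component).getName().equals(name))
                component.setVisible(visible);
    }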


Original Name get,emitter

get

<SENTENCE_START> { return effect . get emitters ( ) . get ( effect panel . edit index ) ; } <SENTENCE_END/>

(Copy Probability: 0.9%)

<SENTENCE_START> { return effect . get emitters ( ) . get ( effect panel . edit index ) ; } <SENTENCE_END/>

emitter

<SENTENCE_START> { return effect . get emitters ( ) . get ( effect panel . edit index ) ; } <SENTENCE_END/>

(Copy Probability: 2.2%)

<SENTENCE_START> { return effect . get emitters ( ) . get ( effect panel . edit index ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return effect . get emitters ( ) . get ( effect panel . edit index ) ; } <SENTENCE_END/>

(Copy Probability: 1.6%)

<SENTENCE_START> { return effect . get emitters ( ) . get ( effect panel . edit index ) ; } <SENTENCE_END/>
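
A plausible Java reading of the body above (return type inferred):

    ParticleEmitter getEmitter () {
        return effect.getEmitters().get(effectPanel.editIndex);
    }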


Original Name get,icon

get

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) particle data . put ( emitter , data = new particle data ( ) ) ; string image path = emitter . get image path ( ) ; if ( data . icon == null && image path != null ) { try { url url ; file file = new file ( image path ) ; if ( file . exists ( ) ) url = file . to uri ( ) . to url ( ) ; else { url = particle editor . class . get resource ( image path ) ; if ( url == null ) return null ; } data . icon = new image icon ( url ) ; } catch ( malformed url exception ex ) { ex . print stack trace ( ) ; } } return data . icon ; } <SENTENCE_END/>

(Copy Probability: 3.7%)

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) particle data . put ( emitter , data = new particle data ( ) ) ; string image path = emitter . get image path ( ) ; if ( data . icon == null && image path != null ) { try { url url ; file file = new file ( image path ) ; if ( file . exists ( ) ) url = file . to uri ( ) . to url ( ) ; else { url = particle editor . class . get resource ( image path ) ; if ( url == null ) return null ; } data . icon = new image icon ( url ) ; } catch ( malformed url exception ex ) { ex . print stack trace ( ) ; } } return data . icon ; } <SENTENCE_END/>

icon

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) particle data . put ( emitter , data = new particle data ( ) ) ; string image path = emitter . get image path ( ) ; if ( data . icon == null && image path != null ) { try { url url ; file file = new file ( image path ) ; if ( file . exists ( ) ) url = file . to uri ( ) . to url ( ) ; else { url = particle editor . class . get resource ( image path ) ; if ( url == null ) return null ; } data . icon = new image icon ( url ) ; } catch ( malformed url exception ex ) { ex . print stack trace ( ) ; } } return data . icon ; } <SENTENCE_END/>

(Copy Probability: 26.1%)

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) particle data . put ( emitter , data = new particle data ( ) ) ; string image path = emitter . get image path ( ) ; if ( data . icon == null && image path != null ) { try { url url ; file file = new file ( image path ) ; if ( file . exists ( ) ) url = file . to uri ( ) . to url ( ) ; else { url = particle editor . class . get resource ( image path ) ; if ( url == null ) return null ; } data . icon = new image icon ( url ) ; } catch ( malformed url exception ex ) { ex . print stack trace ( ) ; } } return data . icon ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) particle data . put ( emitter , data = new particle data ( ) ) ; string image path = emitter . get image path ( ) ; if ( data . icon == null && image path != null ) { try { url url ; file file = new file ( image path ) ; if ( file . exists ( ) ) url = file . to uri ( ) . to url ( ) ; else { url = particle editor . class . get resource ( image path ) ; if ( url == null ) return null ; } data . icon = new image icon ( url ) ; } catch ( malformed url exception ex ) { ex . print stack trace ( ) ; } } return data . icon ; } <SENTENCE_END/>

(Copy Probability: 12.1%)

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) particle data . put ( emitter , data = new particle data ( ) ) ; string image path = emitter . get image path ( ) ; if ( data . icon == null && image path != null ) { try { url url ; file file = new file ( image path ) ; if ( file . exists ( ) ) url = file . to uri ( ) . to url ( ) ; else { url = particle editor . class . get resource ( image path ) ; if ( url == null ) return null ; } data . icon = new image icon ( url ) ; } catch ( malformed url exception ex ) { ex . print stack trace ( ) ; } } return data . icon ; } <SENTENCE_END/>
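
A plausible Java reading of the body above; the ImageIcon return type and the emitter parameter are inferred from the tokens:

    ImageIcon getIcon (ParticleEmitter emitter) {
        ParticleData data = particleData.get(emitter);
        if (data == null) particleData.put(emitter, data = new ParticleData());
        String imagePath = emitter.getImagePath();
        if (data.icon == null && imagePath != null) {
            try {
                URL url;
                File file = new File(imagePath);
                if (file.exists())
                    url = file.toURI().toURL();
                else {
                    url = ParticleEditor.class.getResource(imagePath);
                    if (url == null) return null;
                }
                data.icon = new ImageIcon(url);
            } catch (MalformedURLException ex) {
                ex.printStackTrace();
            }
        }
        return data.icon;
    }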


Original Name set,icon

set

<SENTENCE_START> { particle data data = particle data . get ( emitters ) ; if ( data == null ) particle data . put ( emitters , data = new particle data ( ) ) ; data . icon = icon ; } <SENTENCE_END/>

(Copy Probability: 1.8%)

<SENTENCE_START> { particle data data = particle data . get ( emitters ) ; if ( data == null ) particle data . put ( emitters , data = new particle data ( ) ) ; data . icon = icon ; } <SENTENCE_END/>

icon

<SENTENCE_START> { particle data data = particle data . get ( emitters ) ; if ( data == null ) particle data . put ( emitters , data = new particle data ( ) ) ; data . icon = icon ; } <SENTENCE_END/>

(Copy Probability: 88.9%)

<SENTENCE_START> { particle data data = particle data . get ( emitters ) ; if ( data == null ) particle data . put ( emitters , data = new particle data ( ) ) ; data . icon = icon ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { particle data data = particle data . get ( emitters ) ; if ( data == null ) particle data . put ( emitters , data = new particle data ( ) ) ; data . icon = icon ; } <SENTENCE_END/>

(Copy Probability: 6.3%)

<SENTENCE_START> { particle data data = particle data . get ( emitters ) ; if ( data == null ) particle data . put ( emitters , data = new particle data ( ) ) ; data . icon = icon ; } <SENTENCE_END/>
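
A plausible Java reading of the body above; the parameter is named emitters because that is what the tokens use, and the signature itself is inferred:

    void setIcon (ParticleEmitter emitters, ImageIcon icon) {
        ParticleData data = particleData.get(emitters);
        if (data == null) particleData.put(emitters, data = new ParticleData());
        data.icon = icon;
    }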


Original Name set,enabled

set

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) particle data . put ( emitter , data = new particle data ( ) ) ; data . enabled = enabled ; emitter . reset ( ) ; } <SENTENCE_END/>

(Copy Probability: 2.1%)

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) particle data . put ( emitter , data = new particle data ( ) ) ; data . enabled = enabled ; emitter . reset ( ) ; } <SENTENCE_END/>

enabled

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) particle data . put ( emitter , data = new particle data ( ) ) ; data . enabled = enabled ; emitter . reset ( ) ; } <SENTENCE_END/>

(Copy Probability: 76.4%)

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) particle data . put ( emitter , data = new particle data ( ) ) ; data . enabled = enabled ; emitter . reset ( ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) particle data . put ( emitter , data = new particle data ( ) ) ; data . enabled = enabled ; emitter . reset ( ) ; } <SENTENCE_END/>

(Copy Probability: 6.7%)

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) particle data . put ( emitter , data = new particle data ( ) ) ; data . enabled = enabled ; emitter . reset ( ) ; } <SENTENCE_END/>
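
A plausible Java reading of the body above (signature inferred from the tokens):

    void setEnabled (ParticleEmitter emitter, boolean enabled) {
        ParticleData data = particleData.get(emitter);
        if (data == null) particleData.put(emitter, data = new ParticleData());
        data.enabled = enabled;
        emitter.reset();  // restart the emitter so the new enabled state takes effect immediately
    }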


Original Name is,enabled

is

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) return true ; return data . enabled ; } <SENTENCE_END/>

(Copy Probability: 1.1%)

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) return true ; return data . enabled ; } <SENTENCE_END/>

enabled

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) return true ; return data . enabled ; } <SENTENCE_END/>

(Copy Probability: 88.3%)

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) return true ; return data . enabled ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) return true ; return data . enabled ; } <SENTENCE_END/>

(Copy Probability: 27.4%)

<SENTENCE_START> { particle data data = particle data . get ( emitter ) ; if ( data == null ) return true ; return data . enabled ; } <SENTENCE_END/>
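
A plausible Java reading of the body above; note that an emitter with no ParticleData entry is reported as enabled by default:

    boolean isEnabled (ParticleEmitter emitter) {
        ParticleData data = particleData.get(emitter);
        if (data == null) return true;
        return data.enabled;
    }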


Original Name initialize,components

initialize

<SENTENCE_START> { split pane = new j split pane ( ) ; split pane . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; split pane . set divider size ( 4 ) ; get content pane ( ) . add ( split pane , border layout . center ) ; { j split pane right split = new j split pane ( j split pane . vertical split ) ; right split . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; right split . set divider size ( 4 ) ; split pane . add ( right split , j split pane . right ) ; { j panel properties panel = new j panel ( new grid bag layout ( ) ) ; right split . add ( properties panel , j split pane . top ) ; properties panel . set border ( new compound border ( border factory . create empty border ( 3 , 0 , 6 , 6 ) , border factory . create titled border ( "Editor Properties" ) ) ) ; { j scroll pane scroll = new j scroll pane ( ) ; properties panel . add ( scroll , new grid bag constraints ( 0 , 0 , 1 , 1 , 1 , 1 , grid bag constraints . north , grid bag constraints . both , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; scroll . set border ( border factory . create empty border ( 0 , 0 , 0 , 0 ) ) ; { edit rows panel = new j panel ( new grid bag layout ( ) ) ; scroll . set viewport view ( edit rows panel ) ; scroll . get vertical scroll bar ( ) . set unit increment ( 70 ) ; } } } { j panel properties panel = new j panel ( new grid bag layout ( ) ) ; right split . add ( properties panel , j split pane . bottom ) ; properties panel . set border ( new compound border ( border factory . create empty border ( 3 , 0 , 6 , 6 ) , border factory . create titled border ( "Emitter Properties" ) ) ) ; { j scroll pane scroll = new j scroll pane ( ) ; properties panel . add ( scroll , new grid bag constraints ( 0 , 0 , 1 , 1 , 1 , 1 , grid bag constraints . north , grid bag constraints . both , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; scroll . set border ( border factory . create empty border ( 0 , 0 , 0 , 0 ) ) ; { rows panel = new j panel ( new grid bag layout ( ) ) ; scroll . set viewport view ( rows panel ) ; scroll . get vertical scroll bar ( ) . set unit increment ( 70 ) ; } } } right split . set divider location ( 200 ) ; } { j split pane left split = new j split pane ( j split pane . vertical split ) ; left split . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; left split . set divider size ( 4 ) ; split pane . add ( left split , j split pane . left ) ; { j panel spacer = new j panel ( new border layout ( ) ) ; left split . add ( spacer , j split pane . top ) ; spacer . add ( lwjgl canvas . get canvas ( ) ) ; spacer . set border ( border factory . create empty border ( 0 , 0 , 0 , 4 ) ) ; } { j panel emitters panel = new j panel ( new border layout ( ) ) ; left split . add ( emitters panel , j split pane . bottom ) ; emitters panel . set border ( new compound border ( border factory . create empty border ( 0 , 6 , 6 , 0 ) , border factory . create titled border ( "Effect Emitters" ) ) ) ; { effect panel = new effect panel ( this ) ; emitters panel . add ( effect panel ) ; } } left split . set divider location ( 575 ) ; } split pane . set divider location ( 325 ) ; } <SENTENCE_END/>

(Copy Probability: 3.8%)

<SENTENCE_START> { split pane = new j split pane ( ) ; split pane . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; split pane . set divider size ( 4 ) ; get content pane ( ) . add ( split pane , border layout . center ) ; { j split pane right split = new j split pane ( j split pane . vertical split ) ; right split . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; right split . set divider size ( 4 ) ; split pane . add ( right split , j split pane . right ) ; { j panel properties panel = new j panel ( new grid bag layout ( ) ) ; right split . add ( properties panel , j split pane . top ) ; properties panel . set border ( new compound border ( border factory . create empty border ( 3 , 0 , 6 , 6 ) , border factory . create titled border ( "Editor Properties" ) ) ) ; { j scroll pane scroll = new j scroll pane ( ) ; properties panel . add ( scroll , new grid bag constraints ( 0 , 0 , 1 , 1 , 1 , 1 , grid bag constraints . north , grid bag constraints . both , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; scroll . set border ( border factory . create empty border ( 0 , 0 , 0 , 0 ) ) ; { edit rows panel = new j panel ( new grid bag layout ( ) ) ; scroll . set viewport view ( edit rows panel ) ; scroll . get vertical scroll bar ( ) . set unit increment ( 70 ) ; } } } { j panel properties panel = new j panel ( new grid bag layout ( ) ) ; right split . add ( properties panel , j split pane . bottom ) ; properties panel . set border ( new compound border ( border factory . create empty border ( 3 , 0 , 6 , 6 ) , border factory . create titled border ( "Emitter Properties" ) ) ) ; { j scroll pane scroll = new j scroll pane ( ) ; properties panel . add ( scroll , new grid bag constraints ( 0 , 0 , 1 , 1 , 1 , 1 , grid bag constraints . north , grid bag constraints . both , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; scroll . set border ( border factory . create empty border ( 0 , 0 , 0 , 0 ) ) ; { rows panel = new j panel ( new grid bag layout ( ) ) ; scroll . set viewport view ( rows panel ) ; scroll . get vertical scroll bar ( ) . set unit increment ( 70 ) ; } } } right split . set divider location ( 200 ) ; } { j split pane left split = new j split pane ( j split pane . vertical split ) ; left split . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; left split . set divider size ( 4 ) ; split pane . add ( left split , j split pane . left ) ; { j panel spacer = new j panel ( new border layout ( ) ) ; left split . add ( spacer , j split pane . top ) ; spacer . add ( lwjgl canvas . get canvas ( ) ) ; spacer . set border ( border factory . create empty border ( 0 , 0 , 0 , 4 ) ) ; } { j panel emitters panel = new j panel ( new border layout ( ) ) ; left split . add ( emitters panel , j split pane . bottom ) ; emitters panel . set border ( new compound border ( border factory . create empty border ( 0 , 6 , 6 , 0 ) , border factory . create titled border ( "Effect Emitters" ) ) ) ; { effect panel = new effect panel ( this ) ; emitters panel . add ( effect panel ) ; } } left split . set divider location ( 575 ) ; } split pane . set divider location ( 325 ) ; } <SENTENCE_END/>

components

<SENTENCE_START> { split pane = new j split pane ( ) ; split pane . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; split pane . set divider size ( 4 ) ; get content pane ( ) . add ( split pane , border layout . center ) ; { j split pane right split = new j split pane ( j split pane . vertical split ) ; right split . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; right split . set divider size ( 4 ) ; split pane . add ( right split , j split pane . right ) ; { j panel properties panel = new j panel ( new grid bag layout ( ) ) ; right split . add ( properties panel , j split pane . top ) ; properties panel . set border ( new compound border ( border factory . create empty border ( 3 , 0 , 6 , 6 ) , border factory . create titled border ( "Editor Properties" ) ) ) ; { j scroll pane scroll = new j scroll pane ( ) ; properties panel . add ( scroll , new grid bag constraints ( 0 , 0 , 1 , 1 , 1 , 1 , grid bag constraints . north , grid bag constraints . both , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; scroll . set border ( border factory . create empty border ( 0 , 0 , 0 , 0 ) ) ; { edit rows panel = new j panel ( new grid bag layout ( ) ) ; scroll . set viewport view ( edit rows panel ) ; scroll . get vertical scroll bar ( ) . set unit increment ( 70 ) ; } } } { j panel properties panel = new j panel ( new grid bag layout ( ) ) ; right split . add ( properties panel , j split pane . bottom ) ; properties panel . set border ( new compound border ( border factory . create empty border ( 3 , 0 , 6 , 6 ) , border factory . create titled border ( "Emitter Properties" ) ) ) ; { j scroll pane scroll = new j scroll pane ( ) ; properties panel . add ( scroll , new grid bag constraints ( 0 , 0 , 1 , 1 , 1 , 1 , grid bag constraints . north , grid bag constraints . both , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; scroll . set border ( border factory . create empty border ( 0 , 0 , 0 , 0 ) ) ; { rows panel = new j panel ( new grid bag layout ( ) ) ; scroll . set viewport view ( rows panel ) ; scroll . get vertical scroll bar ( ) . set unit increment ( 70 ) ; } } } right split . set divider location ( 200 ) ; } { j split pane left split = new j split pane ( j split pane . vertical split ) ; left split . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; left split . set divider size ( 4 ) ; split pane . add ( left split , j split pane . left ) ; { j panel spacer = new j panel ( new border layout ( ) ) ; left split . add ( spacer , j split pane . top ) ; spacer . add ( lwjgl canvas . get canvas ( ) ) ; spacer . set border ( border factory . create empty border ( 0 , 0 , 0 , 4 ) ) ; } { j panel emitters panel = new j panel ( new border layout ( ) ) ; left split . add ( emitters panel , j split pane . bottom ) ; emitters panel . set border ( new compound border ( border factory . create empty border ( 0 , 6 , 6 , 0 ) , border factory . create titled border ( "Effect Emitters" ) ) ) ; { effect panel = new effect panel ( this ) ; emitters panel . add ( effect panel ) ; } } left split . set divider location ( 575 ) ; } split pane . set divider location ( 325 ) ; } <SENTENCE_END/>

(Copy Probability: 12.5%)

<SENTENCE_START> { split pane = new j split pane ( ) ; split pane . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; split pane . set divider size ( 4 ) ; get content pane ( ) . add ( split pane , border layout . center ) ; { j split pane right split = new j split pane ( j split pane . vertical split ) ; right split . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; right split . set divider size ( 4 ) ; split pane . add ( right split , j split pane . right ) ; { j panel properties panel = new j panel ( new grid bag layout ( ) ) ; right split . add ( properties panel , j split pane . top ) ; properties panel . set border ( new compound border ( border factory . create empty border ( 3 , 0 , 6 , 6 ) , border factory . create titled border ( "Editor Properties" ) ) ) ; { j scroll pane scroll = new j scroll pane ( ) ; properties panel . add ( scroll , new grid bag constraints ( 0 , 0 , 1 , 1 , 1 , 1 , grid bag constraints . north , grid bag constraints . both , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; scroll . set border ( border factory . create empty border ( 0 , 0 , 0 , 0 ) ) ; { edit rows panel = new j panel ( new grid bag layout ( ) ) ; scroll . set viewport view ( edit rows panel ) ; scroll . get vertical scroll bar ( ) . set unit increment ( 70 ) ; } } } { j panel properties panel = new j panel ( new grid bag layout ( ) ) ; right split . add ( properties panel , j split pane . bottom ) ; properties panel . set border ( new compound border ( border factory . create empty border ( 3 , 0 , 6 , 6 ) , border factory . create titled border ( "Emitter Properties" ) ) ) ; { j scroll pane scroll = new j scroll pane ( ) ; properties panel . add ( scroll , new grid bag constraints ( 0 , 0 , 1 , 1 , 1 , 1 , grid bag constraints . north , grid bag constraints . both , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; scroll . set border ( border factory . create empty border ( 0 , 0 , 0 , 0 ) ) ; { rows panel = new j panel ( new grid bag layout ( ) ) ; scroll . set viewport view ( rows panel ) ; scroll . get vertical scroll bar ( ) . set unit increment ( 70 ) ; } } } right split . set divider location ( 200 ) ; } { j split pane left split = new j split pane ( j split pane . vertical split ) ; left split . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; left split . set divider size ( 4 ) ; split pane . add ( left split , j split pane . left ) ; { j panel spacer = new j panel ( new border layout ( ) ) ; left split . add ( spacer , j split pane . top ) ; spacer . add ( lwjgl canvas . get canvas ( ) ) ; spacer . set border ( border factory . create empty border ( 0 , 0 , 0 , 4 ) ) ; } { j panel emitters panel = new j panel ( new border layout ( ) ) ; left split . add ( emitters panel , j split pane . bottom ) ; emitters panel . set border ( new compound border ( border factory . create empty border ( 0 , 6 , 6 , 0 ) , border factory . create titled border ( "Effect Emitters" ) ) ) ; { effect panel = new effect panel ( this ) ; emitters panel . add ( effect panel ) ; } } left split . set divider location ( 575 ) ; } split pane . set divider location ( 325 ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { split pane = new j split pane ( ) ; split pane . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; split pane . set divider size ( 4 ) ; get content pane ( ) . add ( split pane , border layout . center ) ; { j split pane right split = new j split pane ( j split pane . vertical split ) ; right split . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; right split . set divider size ( 4 ) ; split pane . add ( right split , j split pane . right ) ; { j panel properties panel = new j panel ( new grid bag layout ( ) ) ; right split . add ( properties panel , j split pane . top ) ; properties panel . set border ( new compound border ( border factory . create empty border ( 3 , 0 , 6 , 6 ) , border factory . create titled border ( "Editor Properties" ) ) ) ; { j scroll pane scroll = new j scroll pane ( ) ; properties panel . add ( scroll , new grid bag constraints ( 0 , 0 , 1 , 1 , 1 , 1 , grid bag constraints . north , grid bag constraints . both , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; scroll . set border ( border factory . create empty border ( 0 , 0 , 0 , 0 ) ) ; { edit rows panel = new j panel ( new grid bag layout ( ) ) ; scroll . set viewport view ( edit rows panel ) ; scroll . get vertical scroll bar ( ) . set unit increment ( 70 ) ; } } } { j panel properties panel = new j panel ( new grid bag layout ( ) ) ; right split . add ( properties panel , j split pane . bottom ) ; properties panel . set border ( new compound border ( border factory . create empty border ( 3 , 0 , 6 , 6 ) , border factory . create titled border ( "Emitter Properties" ) ) ) ; { j scroll pane scroll = new j scroll pane ( ) ; properties panel . add ( scroll , new grid bag constraints ( 0 , 0 , 1 , 1 , 1 , 1 , grid bag constraints . north , grid bag constraints . both , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; scroll . set border ( border factory . create empty border ( 0 , 0 , 0 , 0 ) ) ; { rows panel = new j panel ( new grid bag layout ( ) ) ; scroll . set viewport view ( rows panel ) ; scroll . get vertical scroll bar ( ) . set unit increment ( 70 ) ; } } } right split . set divider location ( 200 ) ; } { j split pane left split = new j split pane ( j split pane . vertical split ) ; left split . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; left split . set divider size ( 4 ) ; split pane . add ( left split , j split pane . left ) ; { j panel spacer = new j panel ( new border layout ( ) ) ; left split . add ( spacer , j split pane . top ) ; spacer . add ( lwjgl canvas . get canvas ( ) ) ; spacer . set border ( border factory . create empty border ( 0 , 0 , 0 , 4 ) ) ; } { j panel emitters panel = new j panel ( new border layout ( ) ) ; left split . add ( emitters panel , j split pane . bottom ) ; emitters panel . set border ( new compound border ( border factory . create empty border ( 0 , 6 , 6 , 0 ) , border factory . create titled border ( "Effect Emitters" ) ) ) ; { effect panel = new effect panel ( this ) ; emitters panel . add ( effect panel ) ; } } left split . set divider location ( 575 ) ; } split pane . set divider location ( 325 ) ; } <SENTENCE_END/>

(Copy Probability: 10.8%)

<SENTENCE_START> { split pane = new j split pane ( ) ; split pane . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; split pane . set divider size ( 4 ) ; get content pane ( ) . add ( split pane , border layout . center ) ; { j split pane right split = new j split pane ( j split pane . vertical split ) ; right split . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; right split . set divider size ( 4 ) ; split pane . add ( right split , j split pane . right ) ; { j panel properties panel = new j panel ( new grid bag layout ( ) ) ; right split . add ( properties panel , j split pane . top ) ; properties panel . set border ( new compound border ( border factory . create empty border ( 3 , 0 , 6 , 6 ) , border factory . create titled border ( "Editor Properties" ) ) ) ; { j scroll pane scroll = new j scroll pane ( ) ; properties panel . add ( scroll , new grid bag constraints ( 0 , 0 , 1 , 1 , 1 , 1 , grid bag constraints . north , grid bag constraints . both , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; scroll . set border ( border factory . create empty border ( 0 , 0 , 0 , 0 ) ) ; { edit rows panel = new j panel ( new grid bag layout ( ) ) ; scroll . set viewport view ( edit rows panel ) ; scroll . get vertical scroll bar ( ) . set unit increment ( 70 ) ; } } } { j panel properties panel = new j panel ( new grid bag layout ( ) ) ; right split . add ( properties panel , j split pane . bottom ) ; properties panel . set border ( new compound border ( border factory . create empty border ( 3 , 0 , 6 , 6 ) , border factory . create titled border ( "Emitter Properties" ) ) ) ; { j scroll pane scroll = new j scroll pane ( ) ; properties panel . add ( scroll , new grid bag constraints ( 0 , 0 , 1 , 1 , 1 , 1 , grid bag constraints . north , grid bag constraints . both , new insets ( 0 , 0 , 0 , 0 ) , 0 , 0 ) ) ; scroll . set border ( border factory . create empty border ( 0 , 0 , 0 , 0 ) ) ; { rows panel = new j panel ( new grid bag layout ( ) ) ; scroll . set viewport view ( rows panel ) ; scroll . get vertical scroll bar ( ) . set unit increment ( 70 ) ; } } } right split . set divider location ( 200 ) ; } { j split pane left split = new j split pane ( j split pane . vertical split ) ; left split . set ui ( new basic split pane ui ( ) { public void paint ( graphics g , j component jc ) { } } ) ; left split . set divider size ( 4 ) ; split pane . add ( left split , j split pane . left ) ; { j panel spacer = new j panel ( new border layout ( ) ) ; left split . add ( spacer , j split pane . top ) ; spacer . add ( lwjgl canvas . get canvas ( ) ) ; spacer . set border ( border factory . create empty border ( 0 , 0 , 0 , 4 ) ) ; } { j panel emitters panel = new j panel ( new border layout ( ) ) ; left split . add ( emitters panel , j split pane . bottom ) ; emitters panel . set border ( new compound border ( border factory . create empty border ( 0 , 6 , 6 , 0 ) , border factory . create titled border ( "Effect Emitters" ) ) ) ; { effect panel = new effect panel ( this ) ; emitters panel . add ( effect panel ) ; } } left split . set divider location ( 575 ) ; } split pane . set divider location ( 325 ) ; } <SENTENCE_END/>
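
The body above is pure Swing layout. An abridged camel-cased sketch of its structure (identifiers inferred from the tokens; the "..." comments stand for panel-building code elided in this sketch, not missing from the original):

    void initializeComponents () {
        splitPane = new JSplitPane();
        splitPane.setUI(new BasicSplitPaneUI() { public void paint (Graphics g, JComponent jc) {} });
        splitPane.setDividerSize(4);
        getContentPane().add(splitPane, BorderLayout.CENTER);

        // Right side: vertical split with "Editor Properties" (editRowsPanel) above
        // "Emitter Properties" (rowsPanel), each wrapped in a JScrollPane.
        JSplitPane rightSplit = new JSplitPane(JSplitPane.VERTICAL_SPLIT);
        rightSplit.setDividerSize(4);
        splitPane.add(rightSplit, JSplitPane.RIGHT);
        // ... titled panels, scroll panes, editRowsPanel and rowsPanel built as in the tokens ...
        rightSplit.setDividerLocation(200);

        // Left side: the LWJGL canvas above the "Effect Emitters" effectPanel.
        JSplitPane leftSplit = new JSplitPane(JSplitPane.VERTICAL_SPLIT);
        leftSplit.setDividerSize(4);
        splitPane.add(leftSplit, JSplitPane.LEFT);
        // ... spacer panel hosting lwjglCanvas.getCanvas(), then effectPanel ...
        leftSplit.setDividerLocation(575);

        splitPane.setDividerLocation(325);
    }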


Original Name paint

paint

<SENTENCE_START> { } <SENTENCE_END/>

(Copy Probability: 0.1%)

<SENTENCE_START> { } <SENTENCE_END/>

%END%

<SENTENCE_START> { } <SENTENCE_END/>

(Copy Probability: 0.0%)

<SENTENCE_START> { } <SENTENCE_END/>


Original Name paint

paint

<SENTENCE_START> { } <SENTENCE_END/>

(Copy Probability: 0.1%)

<SENTENCE_START> { } <SENTENCE_END/>

%END%

<SENTENCE_START> { } <SENTENCE_END/>

(Copy Probability: 0.0%)

<SENTENCE_START> { } <SENTENCE_END/>


Original Name paint

paint

<SENTENCE_START> { } <SENTENCE_END/>

(Copy Probability: 0.1%)

<SENTENCE_START> { } <SENTENCE_END/>

%END%

<SENTENCE_START> { } <SENTENCE_END/>

(Copy Probability: 0.0%)

<SENTENCE_START> { } <SENTENCE_END/>


Original Name create

create

<SENTENCE_START> { if ( sprite batch != null ) return ; sprite batch = new sprite batch ( ) ; world camera = new orthographic camera ( ) ; text camera = new orthographic camera ( ) ; pixels per meter = new numeric value ( ) ; pixels per meter . set value ( 1.0f ) ; pixels per meter . set always active ( true ) ; zoom level = new numeric value ( ) ; zoom level . set value ( 1.0f ) ; zoom level . set always active ( true ) ; delta multiplier = new numeric value ( ) ; delta multiplier . set value ( 1.0f ) ; delta multiplier . set always active ( true ) ; background color = new gradient color value ( ) ; background color . set colors ( new float [ ] { 0f , 0f , 0f } ) ; font = new bitmap font ( gdx . files . get file handle ( "default.fnt" , file type . internal ) , gdx . files . get file handle ( "default.png" , file type . internal ) , true ) ; effect panel . new example emitter ( "Untitled" , true ) ; gdx . input . set input processor ( this ) ; } <SENTENCE_END/>

(Copy Probability: 3.5%)

<SENTENCE_START> { if ( sprite batch != null ) return ; sprite batch = new sprite batch ( ) ; world camera = new orthographic camera ( ) ; text camera = new orthographic camera ( ) ; pixels per meter = new numeric value ( ) ; pixels per meter . set value ( 1.0f ) ; pixels per meter . set always active ( true ) ; zoom level = new numeric value ( ) ; zoom level . set value ( 1.0f ) ; zoom level . set always active ( true ) ; delta multiplier = new numeric value ( ) ; delta multiplier . set value ( 1.0f ) ; delta multiplier . set always active ( true ) ; background color = new gradient color value ( ) ; background color . set colors ( new float [ ] { 0f , 0f , 0f } ) ; font = new bitmap font ( gdx . files . get file handle ( "default.fnt" , file type . internal ) , gdx . files . get file handle ( "default.png" , file type . internal ) , true ) ; effect panel . new example emitter ( "Untitled" , true ) ; gdx . input . set input processor ( this ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( sprite batch != null ) return ; sprite batch = new sprite batch ( ) ; world camera = new orthographic camera ( ) ; text camera = new orthographic camera ( ) ; pixels per meter = new numeric value ( ) ; pixels per meter . set value ( 1.0f ) ; pixels per meter . set always active ( true ) ; zoom level = new numeric value ( ) ; zoom level . set value ( 1.0f ) ; zoom level . set always active ( true ) ; delta multiplier = new numeric value ( ) ; delta multiplier . set value ( 1.0f ) ; delta multiplier . set always active ( true ) ; background color = new gradient color value ( ) ; background color . set colors ( new float [ ] { 0f , 0f , 0f } ) ; font = new bitmap font ( gdx . files . get file handle ( "default.fnt" , file type . internal ) , gdx . files . get file handle ( "default.png" , file type . internal ) , true ) ; effect panel . new example emitter ( "Untitled" , true ) ; gdx . input . set input processor ( this ) ; } <SENTENCE_END/>

(Copy Probability: 10.9%)

<SENTENCE_START> { if ( sprite batch != null ) return ; sprite batch = new sprite batch ( ) ; world camera = new orthographic camera ( ) ; text camera = new orthographic camera ( ) ; pixels per meter = new numeric value ( ) ; pixels per meter . set value ( 1.0f ) ; pixels per meter . set always active ( true ) ; zoom level = new numeric value ( ) ; zoom level . set value ( 1.0f ) ; zoom level . set always active ( true ) ; delta multiplier = new numeric value ( ) ; delta multiplier . set value ( 1.0f ) ; delta multiplier . set always active ( true ) ; background color = new gradient color value ( ) ; background color . set colors ( new float [ ] { 0f , 0f , 0f } ) ; font = new bitmap font ( gdx . files . get file handle ( "default.fnt" , file type . internal ) , gdx . files . get file handle ( "default.png" , file type . internal ) , true ) ; effect panel . new example emitter ( "Untitled" , true ) ; gdx . input . set input processor ( this ) ; } <SENTENCE_END/>
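
De-tokenized, the create body above reads roughly as follows. The field names and the NumericValue / GradientColorValue / EffectPanel types are inferred from the tokens (they match how a libGDX-based particle editor is usually structured), so this is a sketch rather than a quote.

    // Assumed surrounding fields: SpriteBatch spriteBatch; OrthographicCamera worldCamera, textCamera;
    // NumericValue pixelsPerMeter, zoomLevel, deltaMultiplier; GradientColorValue backgroundColor;
    // BitmapFont font; EffectPanel effectPanel.
    public void create () {
        if (spriteBatch != null) return; // only initialise GL-side resources once

        spriteBatch = new SpriteBatch();
        worldCamera = new OrthographicCamera();
        textCamera = new OrthographicCamera();

        pixelsPerMeter = new NumericValue();
        pixelsPerMeter.setValue(1.0f);
        pixelsPerMeter.setAlwaysActive(true);

        zoomLevel = new NumericValue();
        zoomLevel.setValue(1.0f);
        zoomLevel.setAlwaysActive(true);

        deltaMultiplier = new NumericValue();
        deltaMultiplier.setValue(1.0f);
        deltaMultiplier.setAlwaysActive(true);

        backgroundColor = new GradientColorValue();
        backgroundColor.setColors(new float[] {0f, 0f, 0f});

        font = new BitmapFont(Gdx.files.getFileHandle("default.fnt", FileType.Internal),
            Gdx.files.getFileHandle("default.png", FileType.Internal), true);

        effectPanel.newExampleEmitter("Untitled", true);
        Gdx.input.setInputProcessor(this);
    }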


Original Name render

render

<SENTENCE_START> { int view width = gdx . graphics . get width ( ) ; int view height = gdx . graphics . get height ( ) ; float delta = math . max ( 0 , gdx . graphics . get delta time ( ) * delta multiplier . get value ( ) ) ; float [ ] colors = background color . get colors ( ) ; gdx . gl . gl clear color ( colors [ 0 ] , colors [ 1 ] , colors [ 2 ] , 1.0f ) ; gdx . gl . gl clear ( gl 20 . gl color buffer bit ) ; if ( ( pixels per meter . get value ( ) != pixels per meter prev ) || ( zoom level . get value ( ) != zoom level prev ) ) { if ( pixels per meter . get value ( ) <= 0 ) { pixels per meter . set value ( 1 ) ; } world camera . set to ortho ( false , view width / pixels per meter . get value ( ) , view height / pixels per meter . get value ( ) ) ; world camera . zoom = zoom level . get value ( ) ; world camera . update ( ) ; effect . set position ( world camera . viewport width / 2 , world camera . viewport height / 2 ) ; zoom level prev = zoom level . get value ( ) ; pixels per meter prev = pixels per meter . get value ( ) ; } sprite batch . set projection matrix ( world camera . combined ) ; sprite batch . begin ( ) ; sprite batch . enable blending ( ) ; sprite batch . set blend function ( gl 20 . gl src alpha , gl 20 . gl one minus src alpha ) ; if ( bg image != null ) { bg image . set position ( view width / 2 - bg image . get width ( ) / 2 , view height / 2 - bg image . get height ( ) / 2 ) ; bg image . draw ( sprite batch ) ; } active count = 0 ; boolean complete = true ; for ( particle emitter emitter : effect . get emitters ( ) ) { if ( emitter . get sprite ( ) == null && emitter . get image path ( ) != null ) load image ( emitter ) ; boolean enabled = is enabled ( emitter ) ; if ( enabled ) { if ( emitter . get sprite ( ) != null ) emitter . draw ( sprite batch , delta ) ; active count += emitter . get active count ( ) ; if ( ! emitter . is complete ( ) ) complete = false ; } } if ( complete ) effect . start ( ) ; max active = math . max ( max active , active count ) ; max active timer += delta ; if ( max active timer > 3 ) { max active timer = 0 ; last max active = max active ; max active = 0 ; } if ( mouse down ) { } sprite batch . set projection matrix ( text camera . combined ) ; font . draw ( sprite batch , "FPS: " + gdx . graphics . get frames per second ( ) , 5 , 15 ) ; font . draw ( sprite batch , "Count: " + active count , 5 , 35 ) ; font . draw ( sprite batch , "Max: " + last max active , 5 , 55 ) ; font . draw ( sprite batch , ( int ) ( get emitter ( ) . get percent complete ( ) * 100 ) + "%" , 5 , 75 ) ; sprite batch . end ( ) ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { int view width = gdx . graphics . get width ( ) ; int view height = gdx . graphics . get height ( ) ; float delta = math . max ( 0 , gdx . graphics . get delta time ( ) * delta multiplier . get value ( ) ) ; float [ ] colors = background color . get colors ( ) ; gdx . gl . gl clear color ( colors [ 0 ] , colors [ 1 ] , colors [ 2 ] , 1.0f ) ; gdx . gl . gl clear ( gl 20 . gl color buffer bit ) ; if ( ( pixels per meter . get value ( ) != pixels per meter prev ) || ( zoom level . get value ( ) != zoom level prev ) ) { if ( pixels per meter . get value ( ) <= 0 ) { pixels per meter . set value ( 1 ) ; } world camera . set to ortho ( false , view width / pixels per meter . get value ( ) , view height / pixels per meter . get value ( ) ) ; world camera . zoom = zoom level . get value ( ) ; world camera . update ( ) ; effect . set position ( world camera . viewport width / 2 , world camera . viewport height / 2 ) ; zoom level prev = zoom level . get value ( ) ; pixels per meter prev = pixels per meter . get value ( ) ; } sprite batch . set projection matrix ( world camera . combined ) ; sprite batch . begin ( ) ; sprite batch . enable blending ( ) ; sprite batch . set blend function ( gl 20 . gl src alpha , gl 20 . gl one minus src alpha ) ; if ( bg image != null ) { bg image . set position ( view width / 2 - bg image . get width ( ) / 2 , view height / 2 - bg image . get height ( ) / 2 ) ; bg image . draw ( sprite batch ) ; } active count = 0 ; boolean complete = true ; for ( particle emitter emitter : effect . get emitters ( ) ) { if ( emitter . get sprite ( ) == null && emitter . get image path ( ) != null ) load image ( emitter ) ; boolean enabled = is enabled ( emitter ) ; if ( enabled ) { if ( emitter . get sprite ( ) != null ) emitter . draw ( sprite batch , delta ) ; active count += emitter . get active count ( ) ; if ( ! emitter . is complete ( ) ) complete = false ; } } if ( complete ) effect . start ( ) ; max active = math . max ( max active , active count ) ; max active timer += delta ; if ( max active timer > 3 ) { max active timer = 0 ; last max active = max active ; max active = 0 ; } if ( mouse down ) { } sprite batch . set projection matrix ( text camera . combined ) ; font . draw ( sprite batch , "FPS: " + gdx . graphics . get frames per second ( ) , 5 , 15 ) ; font . draw ( sprite batch , "Count: " + active count , 5 , 35 ) ; font . draw ( sprite batch , "Max: " + last max active , 5 , 55 ) ; font . draw ( sprite batch , ( int ) ( get emitter ( ) . get percent complete ( ) * 100 ) + "%" , 5 , 75 ) ; sprite batch . end ( ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int view width = gdx . graphics . get width ( ) ; int view height = gdx . graphics . get height ( ) ; float delta = math . max ( 0 , gdx . graphics . get delta time ( ) * delta multiplier . get value ( ) ) ; float [ ] colors = background color . get colors ( ) ; gdx . gl . gl clear color ( colors [ 0 ] , colors [ 1 ] , colors [ 2 ] , 1.0f ) ; gdx . gl . gl clear ( gl 20 . gl color buffer bit ) ; if ( ( pixels per meter . get value ( ) != pixels per meter prev ) || ( zoom level . get value ( ) != zoom level prev ) ) { if ( pixels per meter . get value ( ) <= 0 ) { pixels per meter . set value ( 1 ) ; } world camera . set to ortho ( false , view width / pixels per meter . get value ( ) , view height / pixels per meter . get value ( ) ) ; world camera . zoom = zoom level . get value ( ) ; world camera . update ( ) ; effect . set position ( world camera . viewport width / 2 , world camera . viewport height / 2 ) ; zoom level prev = zoom level . get value ( ) ; pixels per meter prev = pixels per meter . get value ( ) ; } sprite batch . set projection matrix ( world camera . combined ) ; sprite batch . begin ( ) ; sprite batch . enable blending ( ) ; sprite batch . set blend function ( gl 20 . gl src alpha , gl 20 . gl one minus src alpha ) ; if ( bg image != null ) { bg image . set position ( view width / 2 - bg image . get width ( ) / 2 , view height / 2 - bg image . get height ( ) / 2 ) ; bg image . draw ( sprite batch ) ; } active count = 0 ; boolean complete = true ; for ( particle emitter emitter : effect . get emitters ( ) ) { if ( emitter . get sprite ( ) == null && emitter . get image path ( ) != null ) load image ( emitter ) ; boolean enabled = is enabled ( emitter ) ; if ( enabled ) { if ( emitter . get sprite ( ) != null ) emitter . draw ( sprite batch , delta ) ; active count += emitter . get active count ( ) ; if ( ! emitter . is complete ( ) ) complete = false ; } } if ( complete ) effect . start ( ) ; max active = math . max ( max active , active count ) ; max active timer += delta ; if ( max active timer > 3 ) { max active timer = 0 ; last max active = max active ; max active = 0 ; } if ( mouse down ) { } sprite batch . set projection matrix ( text camera . combined ) ; font . draw ( sprite batch , "FPS: " + gdx . graphics . get frames per second ( ) , 5 , 15 ) ; font . draw ( sprite batch , "Count: " + active count , 5 , 35 ) ; font . draw ( sprite batch , "Max: " + last max active , 5 , 55 ) ; font . draw ( sprite batch , ( int ) ( get emitter ( ) . get percent complete ( ) * 100 ) + "%" , 5 , 75 ) ; sprite batch . end ( ) ; } <SENTENCE_END/>

(Copy Probability: 8.7%)

<SENTENCE_START> { int view width = gdx . graphics . get width ( ) ; int view height = gdx . graphics . get height ( ) ; float delta = math . max ( 0 , gdx . graphics . get delta time ( ) * delta multiplier . get value ( ) ) ; float [ ] colors = background color . get colors ( ) ; gdx . gl . gl clear color ( colors [ 0 ] , colors [ 1 ] , colors [ 2 ] , 1.0f ) ; gdx . gl . gl clear ( gl 20 . gl color buffer bit ) ; if ( ( pixels per meter . get value ( ) != pixels per meter prev ) || ( zoom level . get value ( ) != zoom level prev ) ) { if ( pixels per meter . get value ( ) <= 0 ) { pixels per meter . set value ( 1 ) ; } world camera . set to ortho ( false , view width / pixels per meter . get value ( ) , view height / pixels per meter . get value ( ) ) ; world camera . zoom = zoom level . get value ( ) ; world camera . update ( ) ; effect . set position ( world camera . viewport width / 2 , world camera . viewport height / 2 ) ; zoom level prev = zoom level . get value ( ) ; pixels per meter prev = pixels per meter . get value ( ) ; } sprite batch . set projection matrix ( world camera . combined ) ; sprite batch . begin ( ) ; sprite batch . enable blending ( ) ; sprite batch . set blend function ( gl 20 . gl src alpha , gl 20 . gl one minus src alpha ) ; if ( bg image != null ) { bg image . set position ( view width / 2 - bg image . get width ( ) / 2 , view height / 2 - bg image . get height ( ) / 2 ) ; bg image . draw ( sprite batch ) ; } active count = 0 ; boolean complete = true ; for ( particle emitter emitter : effect . get emitters ( ) ) { if ( emitter . get sprite ( ) == null && emitter . get image path ( ) != null ) load image ( emitter ) ; boolean enabled = is enabled ( emitter ) ; if ( enabled ) { if ( emitter . get sprite ( ) != null ) emitter . draw ( sprite batch , delta ) ; active count += emitter . get active count ( ) ; if ( ! emitter . is complete ( ) ) complete = false ; } } if ( complete ) effect . start ( ) ; max active = math . max ( max active , active count ) ; max active timer += delta ; if ( max active timer > 3 ) { max active timer = 0 ; last max active = max active ; max active = 0 ; } if ( mouse down ) { } sprite batch . set projection matrix ( text camera . combined ) ; font . draw ( sprite batch , "FPS: " + gdx . graphics . get frames per second ( ) , 5 , 15 ) ; font . draw ( sprite batch , "Count: " + active count , 5 , 35 ) ; font . draw ( sprite batch , "Max: " + last max active , 5 , 55 ) ; font . draw ( sprite batch , ( int ) ( get emitter ( ) . get percent complete ( ) * 100 ) + "%" , 5 , 75 ) ; sprite batch . end ( ) ; } <SENTENCE_END/>
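
The render body above is long; the two pieces worth calling out are the lazy camera rescale (re-fit the world camera whenever pixels-per-meter or zoom changes) and the per-emitter draw loop that restarts the effect once every enabled emitter reports completion. A condensed sketch of just those parts, with the same caveat that names are inferred:

    // viewWidth, viewHeight and delta are computed earlier in the method.
    // Re-fit the world camera only when the editor-controlled scale values change.
    if (pixelsPerMeter.getValue() != pixelsPerMeterPrev || zoomLevel.getValue() != zoomLevelPrev) {
        if (pixelsPerMeter.getValue() <= 0) pixelsPerMeter.setValue(1);
        worldCamera.setToOrtho(false, viewWidth / pixelsPerMeter.getValue(), viewHeight / pixelsPerMeter.getValue());
        worldCamera.zoom = zoomLevel.getValue();
        worldCamera.update();
        effect.setPosition(worldCamera.viewportWidth / 2, worldCamera.viewportHeight / 2);
        zoomLevelPrev = zoomLevel.getValue();
        pixelsPerMeterPrev = pixelsPerMeter.getValue();
    }

    // Draw every enabled emitter, loading its sprite on first use, and restart the
    // whole effect once all enabled emitters are complete.
    activeCount = 0;
    boolean complete = true;
    for (ParticleEmitter emitter : effect.getEmitters()) {
        if (emitter.getSprite() == null && emitter.getImagePath() != null) loadImage(emitter);
        if (isEnabled(emitter)) {
            if (emitter.getSprite() != null) emitter.draw(spriteBatch, delta);
            activeCount += emitter.getActiveCount();
            if (!emitter.isComplete()) complete = false;
        }
    }
    if (complete) effect.start();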


Original Name load,image

load

<SENTENCE_START> { final string image path = emitter . get image path ( ) ; string image name = new file ( image path . replace ( '|' , '/' ) ) . get name ( ) ; try { file handle file ; if ( image path . equals ( particle editor . default particle ) || image path . equals ( particle editor . default premult particle ) ) { file = gdx . files . classpath ( image path ) ; } else { if ( ( image path . contains ( "/" ) || image path . contains ( "|" ) ) && ! image name . contains ( ".." ) ) { file = gdx . files . absolute ( image path ) ; if ( ! file . exists ( ) ) { file = gdx . files . absolute ( new file ( effect file . get parent file ( ) , image name ) . get absolute path ( ) ) ; } } else { file = gdx . files . absolute ( new file ( effect file . get parent file ( ) , image path ) . get absolute path ( ) ) ; } } emitter . set sprite ( new sprite ( new texture ( file ) ) ) ; if ( effect file != null ) { uri relative uri = effect file . get parent file ( ) . to uri ( ) . relativize ( file . file ( ) . to uri ( ) ) ; emitter . set image path ( relative uri . get path ( ) ) ; } } catch ( gdx runtime exception ex ) { ex . print stack trace ( ) ; event queue . invoke later ( new runnable ( ) { public void run ( ) { j option pane . show message dialog ( particle editor . this , "Error loading image: " + image path ) ; } } ) ; emitter . set image path ( null ) ; } } <SENTENCE_END/>

(Copy Probability: 4.4%)

<SENTENCE_START> { final string image path = emitter . get image path ( ) ; string image name = new file ( image path . replace ( '|' , '/' ) ) . get name ( ) ; try { file handle file ; if ( image path . equals ( particle editor . default particle ) || image path . equals ( particle editor . default premult particle ) ) { file = gdx . files . classpath ( image path ) ; } else { if ( ( image path . contains ( "/" ) || image path . contains ( "|" ) ) && ! image name . contains ( ".." ) ) { file = gdx . files . absolute ( image path ) ; if ( ! file . exists ( ) ) { file = gdx . files . absolute ( new file ( effect file . get parent file ( ) , image name ) . get absolute path ( ) ) ; } } else { file = gdx . files . absolute ( new file ( effect file . get parent file ( ) , image path ) . get absolute path ( ) ) ; } } emitter . set sprite ( new sprite ( new texture ( file ) ) ) ; if ( effect file != null ) { uri relative uri = effect file . get parent file ( ) . to uri ( ) . relativize ( file . file ( ) . to uri ( ) ) ; emitter . set image path ( relative uri . get path ( ) ) ; } } catch ( gdx runtime exception ex ) { ex . print stack trace ( ) ; event queue . invoke later ( new runnable ( ) { public void run ( ) { j option pane . show message dialog ( particle editor . this , "Error loading image: " + image path ) ; } } ) ; emitter . set image path ( null ) ; } } <SENTENCE_END/>

image

<SENTENCE_START> { final string image path = emitter . get image path ( ) ; string image name = new file ( image path . replace ( '|' , '/' ) ) . get name ( ) ; try { file handle file ; if ( image path . equals ( particle editor . default particle ) || image path . equals ( particle editor . default premult particle ) ) { file = gdx . files . classpath ( image path ) ; } else { if ( ( image path . contains ( "/" ) || image path . contains ( "|" ) ) && ! image name . contains ( ".." ) ) { file = gdx . files . absolute ( image path ) ; if ( ! file . exists ( ) ) { file = gdx . files . absolute ( new file ( effect file . get parent file ( ) , image name ) . get absolute path ( ) ) ; } } else { file = gdx . files . absolute ( new file ( effect file . get parent file ( ) , image path ) . get absolute path ( ) ) ; } } emitter . set sprite ( new sprite ( new texture ( file ) ) ) ; if ( effect file != null ) { uri relative uri = effect file . get parent file ( ) . to uri ( ) . relativize ( file . file ( ) . to uri ( ) ) ; emitter . set image path ( relative uri . get path ( ) ) ; } } catch ( gdx runtime exception ex ) { ex . print stack trace ( ) ; event queue . invoke later ( new runnable ( ) { public void run ( ) { j option pane . show message dialog ( particle editor . this , "Error loading image: " + image path ) ; } } ) ; emitter . set image path ( null ) ; } } <SENTENCE_END/>

(Copy Probability: 15.3%)

<SENTENCE_START> { final string image path = emitter . get image path ( ) ; string image name = new file ( image path . replace ( '|' , '/' ) ) . get name ( ) ; try { file handle file ; if ( image path . equals ( particle editor . default particle ) || image path . equals ( particle editor . default premult particle ) ) { file = gdx . files . classpath ( image path ) ; } else { if ( ( image path . contains ( "/" ) || image path . contains ( "|" ) ) && ! image name . contains ( ".." ) ) { file = gdx . files . absolute ( image path ) ; if ( ! file . exists ( ) ) { file = gdx . files . absolute ( new file ( effect file . get parent file ( ) , image name ) . get absolute path ( ) ) ; } } else { file = gdx . files . absolute ( new file ( effect file . get parent file ( ) , image path ) . get absolute path ( ) ) ; } } emitter . set sprite ( new sprite ( new texture ( file ) ) ) ; if ( effect file != null ) { uri relative uri = effect file . get parent file ( ) . to uri ( ) . relativize ( file . file ( ) . to uri ( ) ) ; emitter . set image path ( relative uri . get path ( ) ) ; } } catch ( gdx runtime exception ex ) { ex . print stack trace ( ) ; event queue . invoke later ( new runnable ( ) { public void run ( ) { j option pane . show message dialog ( particle editor . this , "Error loading image: " + image path ) ; } } ) ; emitter . set image path ( null ) ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { final string image path = emitter . get image path ( ) ; string image name = new file ( image path . replace ( '|' , '/' ) ) . get name ( ) ; try { file handle file ; if ( image path . equals ( particle editor . default particle ) || image path . equals ( particle editor . default premult particle ) ) { file = gdx . files . classpath ( image path ) ; } else { if ( ( image path . contains ( "/" ) || image path . contains ( "|" ) ) && ! image name . contains ( ".." ) ) { file = gdx . files . absolute ( image path ) ; if ( ! file . exists ( ) ) { file = gdx . files . absolute ( new file ( effect file . get parent file ( ) , image name ) . get absolute path ( ) ) ; } } else { file = gdx . files . absolute ( new file ( effect file . get parent file ( ) , image path ) . get absolute path ( ) ) ; } } emitter . set sprite ( new sprite ( new texture ( file ) ) ) ; if ( effect file != null ) { uri relative uri = effect file . get parent file ( ) . to uri ( ) . relativize ( file . file ( ) . to uri ( ) ) ; emitter . set image path ( relative uri . get path ( ) ) ; } } catch ( gdx runtime exception ex ) { ex . print stack trace ( ) ; event queue . invoke later ( new runnable ( ) { public void run ( ) { j option pane . show message dialog ( particle editor . this , "Error loading image: " + image path ) ; } } ) ; emitter . set image path ( null ) ; } } <SENTENCE_END/>

(Copy Probability: 6.8%)

<SENTENCE_START> { final string image path = emitter . get image path ( ) ; string image name = new file ( image path . replace ( '|' , '/' ) ) . get name ( ) ; try { file handle file ; if ( image path . equals ( particle editor . default particle ) || image path . equals ( particle editor . default premult particle ) ) { file = gdx . files . classpath ( image path ) ; } else { if ( ( image path . contains ( "/" ) || image path . contains ( "|" ) ) && ! image name . contains ( ".." ) ) { file = gdx . files . absolute ( image path ) ; if ( ! file . exists ( ) ) { file = gdx . files . absolute ( new file ( effect file . get parent file ( ) , image name ) . get absolute path ( ) ) ; } } else { file = gdx . files . absolute ( new file ( effect file . get parent file ( ) , image path ) . get absolute path ( ) ) ; } } emitter . set sprite ( new sprite ( new texture ( file ) ) ) ; if ( effect file != null ) { uri relative uri = effect file . get parent file ( ) . to uri ( ) . relativize ( file . file ( ) . to uri ( ) ) ; emitter . set image path ( relative uri . get path ( ) ) ; } } catch ( gdx runtime exception ex ) { ex . print stack trace ( ) ; event queue . invoke later ( new runnable ( ) { public void run ( ) { j option pane . show message dialog ( particle editor . this , "Error loading image: " + image path ) ; } } ) ; emitter . set image path ( null ) ; } } <SENTENCE_END/>
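
The load image body above resolves an emitter's image path in three steps: the two bundled default particles come from the classpath, an absolute-looking path is tried as-is and then falls back to a file of the same name next to the effect file, and anything else is resolved relative to the effect file; on success the stored path is rewritten relative to the effect file, and a GdxRuntimeException pops a dialog and clears the path. A de-tokenized sketch (constant and field names inferred):

    private void loadImage (final ParticleEmitter emitter) {
        final String imagePath = emitter.getImagePath();
        String imageName = new File(imagePath.replace('|', '/')).getName();
        try {
            FileHandle file;
            if (imagePath.equals(ParticleEditor.DEFAULT_PARTICLE) || imagePath.equals(ParticleEditor.DEFAULT_PREMULT_PARTICLE)) {
                // The built-in particle images ship on the classpath.
                file = Gdx.files.classpath(imagePath);
            } else if ((imagePath.contains("/") || imagePath.contains("|")) && !imageName.contains("..")) {
                // Looks like a full path: try it directly, then fall back to a file
                // with the same name next to the effect file.
                file = Gdx.files.absolute(imagePath);
                if (!file.exists()) file = Gdx.files.absolute(new File(effectFile.getParentFile(), imageName).getAbsolutePath());
            } else {
                // Plain name: resolve relative to the effect file's directory.
                file = Gdx.files.absolute(new File(effectFile.getParentFile(), imagePath).getAbsolutePath());
            }
            emitter.setSprite(new Sprite(new Texture(file)));
            if (effectFile != null) {
                // Store the path relative to the effect file so saved effects stay portable.
                URI relativeUri = effectFile.getParentFile().toURI().relativize(file.file().toURI());
                emitter.setImagePath(relativeUri.getPath());
            }
        } catch (GdxRuntimeException ex) {
            ex.printStackTrace();
            EventQueue.invokeLater(new Runnable() {
                public void run () {
                    JOptionPane.showMessageDialog(ParticleEditor.this, "Error loading image: " + imagePath);
                }
            });
            emitter.setImagePath(null);
        }
    }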


Original Name run

run

<SENTENCE_START> { j option pane . show message dialog ( particle editor . this , "Error loading image: " + image path ) ; } <SENTENCE_END/>

(Copy Probability: 1.9%)

<SENTENCE_START> { j option pane . show message dialog ( particle editor . this , "Error loading image: " + image path ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { j option pane . show message dialog ( particle editor . this , "Error loading image: " + image path ) ; } <SENTENCE_END/>

(Copy Probability: 26.8%)

<SENTENCE_START> { j option pane . show message dialog ( particle editor . this , "Error loading image: " + image path ) ; } <SENTENCE_END/>


Original Name key,down

key

<SENTENCE_START> { return false ; } <SENTENCE_END/>

(Copy Probability: 0.2%)

<SENTENCE_START> { return false ; } <SENTENCE_END/>

down

<SENTENCE_START> { return false ; } <SENTENCE_END/>

(Copy Probability: 0.3%)

<SENTENCE_START> { return false ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return false ; } <SENTENCE_END/>

(Copy Probability: 0.1%)

<SENTENCE_START> { return false ; } <SENTENCE_END/>


Original Name key,up

key

<SENTENCE_START> { return false ; } <SENTENCE_END/>

(Copy Probability: 0.2%)

<SENTENCE_START> { return false ; } <SENTENCE_END/>

up

<SENTENCE_START> { return false ; } <SENTENCE_END/>

(Copy Probability: 0.3%)

<SENTENCE_START> { return false ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return false ; } <SENTENCE_END/>

(Copy Probability: 0.1%)

<SENTENCE_START> { return false ; } <SENTENCE_END/>


Original Name key,typed

key

<SENTENCE_START> { return false ; } <SENTENCE_END/>

(Copy Probability: 0.2%)

<SENTENCE_START> { return false ; } <SENTENCE_END/>

typed

<SENTENCE_START> { return false ; } <SENTENCE_END/>

(Copy Probability: 0.3%)

<SENTENCE_START> { return false ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return false ; } <SENTENCE_END/>

(Copy Probability: 0.1%)

<SENTENCE_START> { return false ; } <SENTENCE_END/>


Original Name touch,down

touch

<SENTENCE_START> { vector 3 touch point = new vector 3 ( x , y , 0 ) ; world camera . unproject ( touch point ) ; effect . set position ( touch point . x , touch point . y ) ; return false ; } <SENTENCE_END/>

(Copy Probability: 1.9%)

<SENTENCE_START> { vector 3 touch point = new vector 3 ( x , y , 0 ) ; world camera . unproject ( touch point ) ; effect . set position ( touch point . x , touch point . y ) ; return false ; } <SENTENCE_END/>

down

<SENTENCE_START> { vector 3 touch point = new vector 3 ( x , y , 0 ) ; world camera . unproject ( touch point ) ; effect . set position ( touch point . x , touch point . y ) ; return false ; } <SENTENCE_END/>

(Copy Probability: 4.4%)

<SENTENCE_START> { vector 3 touch point = new vector 3 ( x , y , 0 ) ; world camera . unproject ( touch point ) ; effect . set position ( touch point . x , touch point . y ) ; return false ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { vector 3 touch point = new vector 3 ( x , y , 0 ) ; world camera . unproject ( touch point ) ; effect . set position ( touch point . x , touch point . y ) ; return false ; } <SENTENCE_END/>

(Copy Probability: 4.9%)

<SENTENCE_START> { vector 3 touch point = new vector 3 ( x , y , 0 ) ; world camera . unproject ( touch point ) ; effect . set position ( touch point . x , touch point . y ) ; return false ; } <SENTENCE_END/>
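
The touch down body above (and touch dragged below, which is identical) unprojects the screen coordinates through the world camera so the effect follows the pointer in world space. A sketch, assuming libGDX's InputProcessor signature for the remaining parameters:

    public boolean touchDown (int x, int y, int pointer, int button) {
        Vector3 touchPoint = new Vector3(x, y, 0);
        worldCamera.unproject(touchPoint);              // screen -> world coordinates
        effect.setPosition(touchPoint.x, touchPoint.y);
        return false;                                   // let other processors see the event too
    }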


Original Name touch,up

touch

<SENTENCE_START> { particle editor . this . dispatch event ( new window event ( particle editor . this , window event . window lost focus ) ) ; particle editor . this . dispatch event ( new window event ( particle editor . this , window event . window gained focus ) ) ; particle editor . this . request focus in window ( ) ; return false ; } <SENTENCE_END/>

(Copy Probability: 5.2%)

<SENTENCE_START> { particle editor . this . dispatch event ( new window event ( particle editor . this , window event . window lost focus ) ) ; particle editor . this . dispatch event ( new window event ( particle editor . this , window event . window gained focus ) ) ; particle editor . this . request focus in window ( ) ; return false ; } <SENTENCE_END/>

up

<SENTENCE_START> { particle editor . this . dispatch event ( new window event ( particle editor . this , window event . window lost focus ) ) ; particle editor . this . dispatch event ( new window event ( particle editor . this , window event . window gained focus ) ) ; particle editor . this . request focus in window ( ) ; return false ; } <SENTENCE_END/>

(Copy Probability: 53.5%)

<SENTENCE_START> { particle editor . this . dispatch event ( new window event ( particle editor . this , window event . window lost focus ) ) ; particle editor . this . dispatch event ( new window event ( particle editor . this , window event . window gained focus ) ) ; particle editor . this . request focus in window ( ) ; return false ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { particle editor . this . dispatch event ( new window event ( particle editor . this , window event . window lost focus ) ) ; particle editor . this . dispatch event ( new window event ( particle editor . this , window event . window gained focus ) ) ; particle editor . this . request focus in window ( ) ; return false ; } <SENTENCE_END/>

(Copy Probability: 33.6%)

<SENTENCE_START> { particle editor . this . dispatch event ( new window event ( particle editor . this , window event . window lost focus ) ) ; particle editor . this . dispatch event ( new window event ( particle editor . this , window event . window gained focus ) ) ; particle editor . this . request focus in window ( ) ; return false ; } <SENTENCE_END/>
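
The touch up body above does a small Swing focus dance: it bounces a synthetic focus-lost / focus-gained pair at the editor frame and then requests focus back, presumably so the embedded GL canvas does not keep keyboard focus after a click. De-tokenized:

    public boolean touchUp (int x, int y, int pointer, int button) {
        // Re-assert Swing keyboard focus on the editor window after a click in the canvas.
        ParticleEditor.this.dispatchEvent(new WindowEvent(ParticleEditor.this, WindowEvent.WINDOW_LOST_FOCUS));
        ParticleEditor.this.dispatchEvent(new WindowEvent(ParticleEditor.this, WindowEvent.WINDOW_GAINED_FOCUS));
        ParticleEditor.this.requestFocusInWindow();
        return false;
    }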


Original Name touch,dragged

touch

<SENTENCE_START> { vector 3 touch point = new vector 3 ( x , y , 0 ) ; world camera . unproject ( touch point ) ; effect . set position ( touch point . x , touch point . y ) ; return false ; } <SENTENCE_END/>

(Copy Probability: 1.9%)

<SENTENCE_START> { vector 3 touch point = new vector 3 ( x , y , 0 ) ; world camera . unproject ( touch point ) ; effect . set position ( touch point . x , touch point . y ) ; return false ; } <SENTENCE_END/>

dragged

<SENTENCE_START> { vector 3 touch point = new vector 3 ( x , y , 0 ) ; world camera . unproject ( touch point ) ; effect . set position ( touch point . x , touch point . y ) ; return false ; } <SENTENCE_END/>

(Copy Probability: 4.4%)

<SENTENCE_START> { vector 3 touch point = new vector 3 ( x , y , 0 ) ; world camera . unproject ( touch point ) ; effect . set position ( touch point . x , touch point . y ) ; return false ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { vector 3 touch point = new vector 3 ( x , y , 0 ) ; world camera . unproject ( touch point ) ; effect . set position ( touch point . x , touch point . y ) ; return false ; } <SENTENCE_END/>

(Copy Probability: 4.9%)

<SENTENCE_START> { vector 3 touch point = new vector 3 ( x , y , 0 ) ; world camera . unproject ( touch point ) ; effect . set position ( touch point . x , touch point . y ) ; return false ; } <SENTENCE_END/>


Original Name main

main

<SENTENCE_START> { for ( look and feel info info : ui manager . get installed look and feels ( ) ) { if ( "Nimbus" . equals ( info . get name ( ) ) ) { try { ui manager . set look and feel ( info . get class name ( ) ) ; } catch ( throwable ignored ) { } break ; } } event queue . invoke later ( new runnable ( ) { public void run ( ) { new particle editor ( ) ; } } ) ; } <SENTENCE_END/>

(Copy Probability: 3.1%)

<SENTENCE_START> { for ( look and feel info info : ui manager . get installed look and feels ( ) ) { if ( "Nimbus" . equals ( info . get name ( ) ) ) { try { ui manager . set look and feel ( info . get class name ( ) ) ; } catch ( throwable ignored ) { } break ; } } event queue . invoke later ( new runnable ( ) { public void run ( ) { new particle editor ( ) ; } } ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { for ( look and feel info info : ui manager . get installed look and feels ( ) ) { if ( "Nimbus" . equals ( info . get name ( ) ) ) { try { ui manager . set look and feel ( info . get class name ( ) ) ; } catch ( throwable ignored ) { } break ; } } event queue . invoke later ( new runnable ( ) { public void run ( ) { new particle editor ( ) ; } } ) ; } <SENTENCE_END/>

(Copy Probability: 11.6%)

<SENTENCE_START> { for ( look and feel info info : ui manager . get installed look and feels ( ) ) { if ( "Nimbus" . equals ( info . get name ( ) ) ) { try { ui manager . set look and feel ( info . get class name ( ) ) ; } catch ( throwable ignored ) { } break ; } } event queue . invoke later ( new runnable ( ) { public void run ( ) { new particle editor ( ) ; } } ) ; } <SENTENCE_END/>
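
The main body above is a straightforward Swing bootstrap: pick the Nimbus look and feel if it is installed (ignoring any failure), then construct the editor on the event dispatch thread. De-tokenized (the ParticleEditor class name is inferred):

    public static void main (String[] args) {
        // Use Nimbus when available; fall back to the default look and feel otherwise.
        for (UIManager.LookAndFeelInfo info : UIManager.getInstalledLookAndFeels()) {
            if ("Nimbus".equals(info.getName())) {
                try {
                    UIManager.setLookAndFeel(info.getClassName());
                } catch (Throwable ignored) {
                }
                break;
            }
        }
        // All Swing construction happens on the EDT.
        EventQueue.invokeLater(new Runnable() {
            public void run () {
                new ParticleEditor();
            }
        });
    }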


Original Name run

run

<SENTENCE_START> { new particle editor ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { new particle editor ( ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { new particle editor ( ) ; } <SENTENCE_END/>

(Copy Probability: 1.2%)

<SENTENCE_START> { new particle editor ( ) ; } <SENTENCE_END/>


Original Name add,channel

add

<SENTENCE_START> { return %SELF% ( channel descriptor , null ) ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return %SELF% ( channel descriptor , null ) ; } <SENTENCE_END/>

channel

<SENTENCE_START> { return %SELF% ( channel descriptor , null ) ; } <SENTENCE_END/>

(Copy Probability: 8.6%)

<SENTENCE_START> { return %SELF% ( channel descriptor , null ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return %SELF% ( channel descriptor , null ) ; } <SENTENCE_END/>

(Copy Probability: 2.2%)

<SENTENCE_START> { return %SELF% ( channel descriptor , null ) ; } <SENTENCE_END/>


Original Name add,channel

add

<SENTENCE_START> { t channel = get channel ( channel descriptor ) ; if ( channel == null ) { channel = allocate channel ( channel descriptor ) ; if ( initializer != null ) initializer . init ( channel ) ; arrays . add ( channel ) ; } return channel ; } <SENTENCE_END/>

(Copy Probability: 4.3%)

<SENTENCE_START> { t channel = get channel ( channel descriptor ) ; if ( channel == null ) { channel = allocate channel ( channel descriptor ) ; if ( initializer != null ) initializer . init ( channel ) ; arrays . add ( channel ) ; } return channel ; } <SENTENCE_END/>

channel

<SENTENCE_START> { t channel = get channel ( channel descriptor ) ; if ( channel == null ) { channel = allocate channel ( channel descriptor ) ; if ( initializer != null ) initializer . init ( channel ) ; arrays . add ( channel ) ; } return channel ; } <SENTENCE_END/>

(Copy Probability: 26.6%)

<SENTENCE_START> { t channel = get channel ( channel descriptor ) ; if ( channel == null ) { channel = allocate channel ( channel descriptor ) ; if ( initializer != null ) initializer . init ( channel ) ; arrays . add ( channel ) ; } return channel ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { t channel = get channel ( channel descriptor ) ; if ( channel == null ) { channel = allocate channel ( channel descriptor ) ; if ( initializer != null ) initializer . init ( channel ) ; arrays . add ( channel ) ; } return channel ; } <SENTENCE_END/>

(Copy Probability: 5.7%)

<SENTENCE_START> { t channel = get channel ( channel descriptor ) ; if ( channel == null ) { channel = allocate channel ( channel descriptor ) ; if ( initializer != null ) initializer . init ( channel ) ; arrays . add ( channel ) ; } return channel ; } <SENTENCE_END/>


Original Name allocate,channel

allocate

<SENTENCE_START> { if ( channel descriptor . type == float . class ) { return ( t ) new float channel ( channel descriptor . id , channel descriptor . count , capacity ) ; } else if ( channel descriptor . type == int . class ) { return ( t ) new int channel ( channel descriptor . id , channel descriptor . count , capacity ) ; } else { return ( t ) new object channel ( channel descriptor . id , channel descriptor . count , capacity , channel descriptor . type ) ; } } <SENTENCE_END/>

(Copy Probability: 5.7%)

<SENTENCE_START> { if ( channel descriptor . type == float . class ) { return ( t ) new float channel ( channel descriptor . id , channel descriptor . count , capacity ) ; } else if ( channel descriptor . type == int . class ) { return ( t ) new int channel ( channel descriptor . id , channel descriptor . count , capacity ) ; } else { return ( t ) new object channel ( channel descriptor . id , channel descriptor . count , capacity , channel descriptor . type ) ; } } <SENTENCE_END/>

channel

<SENTENCE_START> { if ( channel descriptor . type == float . class ) { return ( t ) new float channel ( channel descriptor . id , channel descriptor . count , capacity ) ; } else if ( channel descriptor . type == int . class ) { return ( t ) new int channel ( channel descriptor . id , channel descriptor . count , capacity ) ; } else { return ( t ) new object channel ( channel descriptor . id , channel descriptor . count , capacity , channel descriptor . type ) ; } } <SENTENCE_END/>

(Copy Probability: 41.3%)

<SENTENCE_START> { if ( channel descriptor . type == float . class ) { return ( t ) new float channel ( channel descriptor . id , channel descriptor . count , capacity ) ; } else if ( channel descriptor . type == int . class ) { return ( t ) new int channel ( channel descriptor . id , channel descriptor . count , capacity ) ; } else { return ( t ) new object channel ( channel descriptor . id , channel descriptor . count , capacity , channel descriptor . type ) ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( channel descriptor . type == float . class ) { return ( t ) new float channel ( channel descriptor . id , channel descriptor . count , capacity ) ; } else if ( channel descriptor . type == int . class ) { return ( t ) new int channel ( channel descriptor . id , channel descriptor . count , capacity ) ; } else { return ( t ) new object channel ( channel descriptor . id , channel descriptor . count , capacity , channel descriptor . type ) ; } } <SENTENCE_END/>

(Copy Probability: 8.6%)

<SENTENCE_START> { if ( channel descriptor . type == float . class ) { return ( t ) new float channel ( channel descriptor . id , channel descriptor . count , capacity ) ; } else if ( channel descriptor . type == int . class ) { return ( t ) new int channel ( channel descriptor . id , channel descriptor . count , capacity ) ; } else { return ( t ) new object channel ( channel descriptor . id , channel descriptor . count , capacity , channel descriptor . type ) ; } } <SENTENCE_END/>
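
The allocate channel body above dispatches on the descriptor's element type to build a float-, int-, or object-backed channel of the container's current capacity. A sketch with the channel types inferred from the surrounding methods (they resemble the ones in libGDX's ParallelArray, but that is an assumption):

    @SuppressWarnings("unchecked")
    <T extends Channel> T allocateChannel (ChannelDescriptor channelDescriptor) {
        if (channelDescriptor.type == float.class)
            return (T)new FloatChannel(channelDescriptor.id, channelDescriptor.count, capacity);
        else if (channelDescriptor.type == int.class)
            return (T)new IntChannel(channelDescriptor.id, channelDescriptor.count, capacity);
        else
            // Any other element type gets a generic Object-backed channel.
            return (T)new ObjectChannel(channelDescriptor.id, channelDescriptor.count, capacity, channelDescriptor.type);
    }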


Original Name remove,array

remove

<SENTENCE_START> { arrays . remove index ( find index ( id ) ) ; } <SENTENCE_END/>

(Copy Probability: 0.7%)

<SENTENCE_START> { arrays . remove index ( find index ( id ) ) ; } <SENTENCE_END/>

array

<SENTENCE_START> { arrays . remove index ( find index ( id ) ) ; } <SENTENCE_END/>

(Copy Probability: 0.7%)

<SENTENCE_START> { arrays . remove index ( find index ( id ) ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { arrays . remove index ( find index ( id ) ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { arrays . remove index ( find index ( id ) ) ; } <SENTENCE_END/>


Original Name find,index

find

<SENTENCE_START> { for ( int i = 0 ; i < arrays . size ; ++ i ) { channel array = arrays . items [ i ] ; if ( array . id == id ) return i ; } return - 1 ; } <SENTENCE_END/>

(Copy Probability: 3.0%)

<SENTENCE_START> { for ( int i = 0 ; i < arrays . size ; ++ i ) { channel array = arrays . items [ i ] ; if ( array . id == id ) return i ; } return - 1 ; } <SENTENCE_END/>

index

<SENTENCE_START> { for ( int i = 0 ; i < arrays . size ; ++ i ) { channel array = arrays . items [ i ] ; if ( array . id == id ) return i ; } return - 1 ; } <SENTENCE_END/>

(Copy Probability: 10.1%)

<SENTENCE_START> { for ( int i = 0 ; i < arrays . size ; ++ i ) { channel array = arrays . items [ i ] ; if ( array . id == id ) return i ; } return - 1 ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { for ( int i = 0 ; i < arrays . size ; ++ i ) { channel array = arrays . items [ i ] ; if ( array . id == id ) return i ; } return - 1 ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { for ( int i = 0 ; i < arrays . size ; ++ i ) { channel array = arrays . items [ i ] ; if ( array . id == id ) return i ; } return - 1 ; } <SENTENCE_END/>


Original Name add,element

add

<SENTENCE_START> { if ( size == capacity ) throw new gdx runtime exception ( "Capacity reached, cannot add other elements" ) ; int k = 0 ; for ( channel stride array : arrays ) { stride array . add ( k , values ) ; k += stride array . stride size ; } ++ size ; } <SENTENCE_END/>

(Copy Probability: 3.1%)

<SENTENCE_START> { if ( size == capacity ) throw new gdx runtime exception ( "Capacity reached, cannot add other elements" ) ; int k = 0 ; for ( channel stride array : arrays ) { stride array . add ( k , values ) ; k += stride array . stride size ; } ++ size ; } <SENTENCE_END/>

element

<SENTENCE_START> { if ( size == capacity ) throw new gdx runtime exception ( "Capacity reached, cannot add other elements" ) ; int k = 0 ; for ( channel stride array : arrays ) { stride array . add ( k , values ) ; k += stride array . stride size ; } ++ size ; } <SENTENCE_END/>

(Copy Probability: 7.2%)

<SENTENCE_START> { if ( size == capacity ) throw new gdx runtime exception ( "Capacity reached, cannot add other elements" ) ; int k = 0 ; for ( channel stride array : arrays ) { stride array . add ( k , values ) ; k += stride array . stride size ; } ++ size ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( size == capacity ) throw new gdx runtime exception ( "Capacity reached, cannot add other elements" ) ; int k = 0 ; for ( channel stride array : arrays ) { stride array . add ( k , values ) ; k += stride array . stride size ; } ++ size ; } <SENTENCE_END/>

(Copy Probability: 8.6%)

<SENTENCE_START> { if ( size == capacity ) throw new gdx runtime exception ( "Capacity reached, cannot add other elements" ) ; int k = 0 ; for ( channel stride array : arrays ) { stride array . add ( k , values ) ; k += stride array . stride size ; } ++ size ; } <SENTENCE_END/>


Original Name remove,element

remove

<SENTENCE_START> { int last = size - 1 ; for ( channel stride array : arrays ) { stride array . swap ( index , last ) ; } size = last ; } <SENTENCE_END/>

(Copy Probability: 3.0%)

<SENTENCE_START> { int last = size - 1 ; for ( channel stride array : arrays ) { stride array . swap ( index , last ) ; } size = last ; } <SENTENCE_END/>

element

<SENTENCE_START> { int last = size - 1 ; for ( channel stride array : arrays ) { stride array . swap ( index , last ) ; } size = last ; } <SENTENCE_END/>

(Copy Probability: 14.7%)

<SENTENCE_START> { int last = size - 1 ; for ( channel stride array : arrays ) { stride array . swap ( index , last ) ; } size = last ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int last = size - 1 ; for ( channel stride array : arrays ) { stride array . swap ( index , last ) ; } size = last ; } <SENTENCE_END/>

(Copy Probability: 1.9%)

<SENTENCE_START> { int last = size - 1 ; for ( channel stride array : arrays ) { stride array . swap ( index , last ) ; } size = last ; } <SENTENCE_END/>
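
The add element and remove element bodies above are the classic struct-of-arrays pattern: adding writes one stride into every channel, and removal swaps the doomed slot with the last element so all channels stay dense (element order is not preserved). A minimal self-contained illustration of the idea for a single float channel (hypothetical names, not the original API):

    // One dense float channel with a fixed stride per element.
    class FloatChannelSketch {
        final int strideSize;
        final float[] data;
        int size;

        FloatChannelSketch (int strideSize, int capacity) {
            this.strideSize = strideSize;
            this.data = new float[strideSize * capacity];
        }

        void addElement (float... values) {
            if ((size + 1) * strideSize > data.length)
                throw new IllegalStateException("Capacity reached, cannot add other elements");
            System.arraycopy(values, 0, data, size * strideSize, strideSize);
            ++size;
        }

        void removeElement (int index) {
            int last = size - 1;
            // Swap the removed element's stride with the last one, then shrink.
            for (int i = 0; i < strideSize; i++) {
                float tmp = data[index * strideSize + i];
                data[index * strideSize + i] = data[last * strideSize + i];
                data[last * strideSize + i] = tmp;
            }
            size = last;
        }
    }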


Original Name get,channel

get

<SENTENCE_START> { for ( channel array : arrays ) { if ( array . id == descriptor . id ) return ( t ) array ; } return null ; } <SENTENCE_END/>

(Copy Probability: 1.7%)

<SENTENCE_START> { for ( channel array : arrays ) { if ( array . id == descriptor . id ) return ( t ) array ; } return null ; } <SENTENCE_END/>

channel

<SENTENCE_START> { for ( channel array : arrays ) { if ( array . id == descriptor . id ) return ( t ) array ; } return null ; } <SENTENCE_END/>

(Copy Probability: 54.4%)

<SENTENCE_START> { for ( channel array : arrays ) { if ( array . id == descriptor . id ) return ( t ) array ; } return null ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { for ( channel array : arrays ) { if ( array . id == descriptor . id ) return ( t ) array ; } return null ; } <SENTENCE_END/>

(Copy Probability: 6.6%)

<SENTENCE_START> { for ( channel array : arrays ) { if ( array . id == descriptor . id ) return ( t ) array ; } return null ; } <SENTENCE_END/>


Original Name clear

clear

<SENTENCE_START> { arrays . %SELF% ( ) ; size = 0 ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { arrays . %SELF% ( ) ; size = 0 ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { arrays . %SELF% ( ) ; size = 0 ; } <SENTENCE_END/>

(Copy Probability: 0.1%)

<SENTENCE_START> { arrays . %SELF% ( ) ; size = 0 ; } <SENTENCE_END/>


Original Name set,capacity

set

<SENTENCE_START> { if ( capacity != required capacity ) { for ( channel channel : arrays ) { channel . %SELF% ( required capacity ) ; } capacity = required capacity ; } } <SENTENCE_END/>

(Copy Probability: 6.2%)

<SENTENCE_START> { if ( capacity != required capacity ) { for ( channel channel : arrays ) { channel . %SELF% ( required capacity ) ; } capacity = required capacity ; } } <SENTENCE_END/>

capacity

<SENTENCE_START> { if ( capacity != required capacity ) { for ( channel channel : arrays ) { channel . %SELF% ( required capacity ) ; } capacity = required capacity ; } } <SENTENCE_END/>

(Copy Probability: 91.6%)

<SENTENCE_START> { if ( capacity != required capacity ) { for ( channel channel : arrays ) { channel . %SELF% ( required capacity ) ; } capacity = required capacity ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( capacity != required capacity ) { for ( channel channel : arrays ) { channel . %SELF% ( required capacity ) ; } capacity = required capacity ; } } <SENTENCE_END/>

(Copy Probability: 31.6%)

<SENTENCE_START> { if ( capacity != required capacity ) { for ( channel channel : arrays ) { channel . %SELF% ( required capacity ) ; } capacity = required capacity ; } } <SENTENCE_END/>


Original Name get,c,ptr

get

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

(Copy Probability: 1.5%)

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

c

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

(Copy Probability: 99.7%)

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

ptr

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

(Copy Probability: 3.7%)

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>
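
The get c ptr body above is the standard SWIG unwrap idiom: a null Java proxy maps to the native null pointer, otherwise the stored native address is returned. De-tokenized as a sketch; the proxy type is not recoverable from the dump, so btQuantizedBvhFloatData is assumed here because the neighbouring setters and getters belong to it:

    public static long getCPtr (btQuantizedBvhFloatData obj) {
        return (obj == null) ? 0 : obj.swigCPtr;
    }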


Original Name set,bvh,aabb,min

set

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb min set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 3.2%)

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb min set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

bvh

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb min set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb min set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

aabb

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb min set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 100.0%)

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb min set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

min

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb min set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 94.7%)

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb min set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb min set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 10.5%)

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb min set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>


Original Name get,bvh,aabb,min

get

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb min get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb min get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

bvh

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb min get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb min get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

aabb

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb min get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 100.0%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb min get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

min

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb min get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 94.5%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb min get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb min get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 11.1%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb min get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>


Original Name set,bvh,aabb,max

set

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb max set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 2.8%)

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb max set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

bvh

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb max set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb max set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

aabb

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb max set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 100.0%)

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb max set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

max

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb max set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 93.6%)

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb max set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb max set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 9.6%)

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh aabb max set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>


Original Name get,bvh,aabb,max

get

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb max get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 3.3%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb max get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

bvh

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb max get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb max get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

aabb

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb max get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 100.0%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb max get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

max

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb max get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 93.6%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb max get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb max get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 10.4%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh aabb max get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>


Original Name set,bvh,quantization

set

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh quantization set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 2.3%)

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh quantization set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

bvh

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh quantization set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh quantization set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

quantization

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh quantization set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 98.1%)

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh quantization set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh quantization set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 4.0%)

<SENTENCE_START> { collision jni . bt quantized bvh float data bvh quantization set ( swig c ptr , this , bt vector 3 float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>


Original Name get,bvh,quantization

get

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh quantization get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 2.6%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh quantization get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

bvh

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh quantization get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh quantization get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

quantization

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh quantization get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 97.6%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh quantization get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh quantization get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 3.5%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data bvh quantization get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt vector 3 float data ( c ptr , false ) ; } <SENTENCE_END/>


Original Name set,cur,node,index

set

<SENTENCE_START> { collision jni . bt quantized bvh float data cur node index set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 1.6%)

<SENTENCE_START> { collision jni . bt quantized bvh float data cur node index set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

cur

<SENTENCE_START> { collision jni . bt quantized bvh float data cur node index set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { collision jni . bt quantized bvh float data cur node index set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

node

<SENTENCE_START> { collision jni . bt quantized bvh float data cur node index set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { collision jni . bt quantized bvh float data cur node index set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

index

<SENTENCE_START> { collision jni . bt quantized bvh float data cur node index set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 56.5%)

<SENTENCE_START> { collision jni . bt quantized bvh float data cur node index set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { collision jni . bt quantized bvh float data cur node index set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 6.5%)

<SENTENCE_START> { collision jni . bt quantized bvh float data cur node index set ( swig c ptr , this , value ) ; } <SENTENCE_END/>


Original Name get,cur,node,index

get

<SENTENCE_START> { return collision jni . bt quantized bvh float data cur node index get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 1.4%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data cur node index get ( swig c ptr , this ) ; } <SENTENCE_END/>

cur

<SENTENCE_START> { return collision jni . bt quantized bvh float data cur node index get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data cur node index get ( swig c ptr , this ) ; } <SENTENCE_END/>

node

<SENTENCE_START> { return collision jni . bt quantized bvh float data cur node index get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data cur node index get ( swig c ptr , this ) ; } <SENTENCE_END/>

index

<SENTENCE_START> { return collision jni . bt quantized bvh float data cur node index get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 57.3%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data cur node index get ( swig c ptr , this ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return collision jni . bt quantized bvh float data cur node index get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 6.7%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data cur node index get ( swig c ptr , this ) ; } <SENTENCE_END/>


Original Name set,use,quantization

set

<SENTENCE_START> { collision jni . bt quantized bvh float data use quantization set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 2.4%)

<SENTENCE_START> { collision jni . bt quantized bvh float data use quantization set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

use

<SENTENCE_START> { collision jni . bt quantized bvh float data use quantization set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { collision jni . bt quantized bvh float data use quantization set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

quantization

<SENTENCE_START> { collision jni . bt quantized bvh float data use quantization set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.6%)

<SENTENCE_START> { collision jni . bt quantized bvh float data use quantization set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { collision jni . bt quantized bvh float data use quantization set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 31.3%)

<SENTENCE_START> { collision jni . bt quantized bvh float data use quantization set ( swig c ptr , this , value ) ; } <SENTENCE_END/>


Original Name get,use,quantization

get

<SENTENCE_START> { return collision jni . bt quantized bvh float data use quantization get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 2.2%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data use quantization get ( swig c ptr , this ) ; } <SENTENCE_END/>

use

<SENTENCE_START> { return collision jni . bt quantized bvh float data use quantization get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data use quantization get ( swig c ptr , this ) ; } <SENTENCE_END/>

quantization

<SENTENCE_START> { return collision jni . bt quantized bvh float data use quantization get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data use quantization get ( swig c ptr , this ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return collision jni . bt quantized bvh float data use quantization get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 42.2%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data use quantization get ( swig c ptr , this ) ; } <SENTENCE_END/>


Original Name set,num,contiguous,leaf,nodes

set

<SENTENCE_START> { collision jni . bt quantized bvh float data num contiguous leaf nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 2.6%)

<SENTENCE_START> { collision jni . bt quantized bvh float data num contiguous leaf nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

num

<SENTENCE_START> { collision jni . bt quantized bvh float data num contiguous leaf nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.7%)

<SENTENCE_START> { collision jni . bt quantized bvh float data num contiguous leaf nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

contiguous

<SENTENCE_START> { collision jni . bt quantized bvh float data num contiguous leaf nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { collision jni . bt quantized bvh float data num contiguous leaf nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

leaf

<SENTENCE_START> { collision jni . bt quantized bvh float data num contiguous leaf nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 90.8%)

<SENTENCE_START> { collision jni . bt quantized bvh float data num contiguous leaf nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

nodes

<SENTENCE_START> { collision jni . bt quantized bvh float data num contiguous leaf nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 23.5%)

<SENTENCE_START> { collision jni . bt quantized bvh float data num contiguous leaf nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { collision jni . bt quantized bvh float data num contiguous leaf nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 22.6%)

<SENTENCE_START> { collision jni . bt quantized bvh float data num contiguous leaf nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>


Original Name get,num,contiguous,leaf,nodes

get

<SENTENCE_START> { return collision jni . bt quantized bvh float data num contiguous leaf nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 2.2%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data num contiguous leaf nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

num

<SENTENCE_START> { return collision jni . bt quantized bvh float data num contiguous leaf nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 99.6%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data num contiguous leaf nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

contiguous

<SENTENCE_START> { return collision jni . bt quantized bvh float data num contiguous leaf nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data num contiguous leaf nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

leaf

<SENTENCE_START> { return collision jni . bt quantized bvh float data num contiguous leaf nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 89.8%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data num contiguous leaf nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

nodes

<SENTENCE_START> { return collision jni . bt quantized bvh float data num contiguous leaf nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 23.9%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data num contiguous leaf nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return collision jni . bt quantized bvh float data num contiguous leaf nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 22.5%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data num contiguous leaf nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>


Original Name set,num,quantized,contiguous,nodes

set

<SENTENCE_START> { collision jni . bt quantized bvh float data num quantized contiguous nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 3.8%)

<SENTENCE_START> { collision jni . bt quantized bvh float data num quantized contiguous nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

num

<SENTENCE_START> { collision jni . bt quantized bvh float data num quantized contiguous nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { collision jni . bt quantized bvh float data num quantized contiguous nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

quantized

<SENTENCE_START> { collision jni . bt quantized bvh float data num quantized contiguous nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 100.0%)

<SENTENCE_START> { collision jni . bt quantized bvh float data num quantized contiguous nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

contiguous

<SENTENCE_START> { collision jni . bt quantized bvh float data num quantized contiguous nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 96.8%)

<SENTENCE_START> { collision jni . bt quantized bvh float data num quantized contiguous nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

nodes

<SENTENCE_START> { collision jni . bt quantized bvh float data num quantized contiguous nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 89.6%)

<SENTENCE_START> { collision jni . bt quantized bvh float data num quantized contiguous nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { collision jni . bt quantized bvh float data num quantized contiguous nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 33.5%)

<SENTENCE_START> { collision jni . bt quantized bvh float data num quantized contiguous nodes set ( swig c ptr , this , value ) ; } <SENTENCE_END/>


Original Name get,num,quantized,contiguous,nodes

get

<SENTENCE_START> { return collision jni . bt quantized bvh float data num quantized contiguous nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 3.2%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data num quantized contiguous nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

num

<SENTENCE_START> { return collision jni . bt quantized bvh float data num quantized contiguous nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data num quantized contiguous nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

quantized

<SENTENCE_START> { return collision jni . bt quantized bvh float data num quantized contiguous nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 100.0%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data num quantized contiguous nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

contiguous

<SENTENCE_START> { return collision jni . bt quantized bvh float data num quantized contiguous nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 96.3%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data num quantized contiguous nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

nodes

<SENTENCE_START> { return collision jni . bt quantized bvh float data num quantized contiguous nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 89.1%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data num quantized contiguous nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return collision jni . bt quantized bvh float data num quantized contiguous nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 32.1%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data num quantized contiguous nodes get ( swig c ptr , this ) ; } <SENTENCE_END/>


Original Name set,contiguous,nodes,ptr

set

<SENTENCE_START> { collision jni . bt quantized bvh float data contiguous nodes ptr set ( swig c ptr , this , bt optimized bvh node float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 3.0%)

<SENTENCE_START> { collision jni . bt quantized bvh float data contiguous nodes ptr set ( swig c ptr , this , bt optimized bvh node float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

contiguous

<SENTENCE_START> { collision jni . bt quantized bvh float data contiguous nodes ptr set ( swig c ptr , this , bt optimized bvh node float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { collision jni . bt quantized bvh float data contiguous nodes ptr set ( swig c ptr , this , bt optimized bvh node float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

nodes

<SENTENCE_START> { collision jni . bt quantized bvh float data contiguous nodes ptr set ( swig c ptr , this , bt optimized bvh node float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 100.0%)

<SENTENCE_START> { collision jni . bt quantized bvh float data contiguous nodes ptr set ( swig c ptr , this , bt optimized bvh node float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

ptr

<SENTENCE_START> { collision jni . bt quantized bvh float data contiguous nodes ptr set ( swig c ptr , this , bt optimized bvh node float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 94.1%)

<SENTENCE_START> { collision jni . bt quantized bvh float data contiguous nodes ptr set ( swig c ptr , this , bt optimized bvh node float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { collision jni . bt quantized bvh float data contiguous nodes ptr set ( swig c ptr , this , bt optimized bvh node float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 9.9%)

<SENTENCE_START> { collision jni . bt quantized bvh float data contiguous nodes ptr set ( swig c ptr , this , bt optimized bvh node float data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>


Original Name get,contiguous,nodes,ptr

get

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt optimized bvh node float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 3.0%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt optimized bvh node float data ( c ptr , false ) ; } <SENTENCE_END/>

contiguous

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt optimized bvh node float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt optimized bvh node float data ( c ptr , false ) ; } <SENTENCE_END/>

nodes

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt optimized bvh node float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt optimized bvh node float data ( c ptr , false ) ; } <SENTENCE_END/>

ptr

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt optimized bvh node float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 91.6%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt optimized bvh node float data ( c ptr , false ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt optimized bvh node float data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 8.9%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt optimized bvh node float data ( c ptr , false ) ; } <SENTENCE_END/>


Original Name set,quantized,contiguous,nodes,ptr

set

<SENTENCE_START> { collision jni . bt quantized bvh float data quantized contiguous nodes ptr set ( swig c ptr , this , bt quantized bvh node data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 3.5%)

<SENTENCE_START> { collision jni . bt quantized bvh float data quantized contiguous nodes ptr set ( swig c ptr , this , bt quantized bvh node data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

quantized

<SENTENCE_START> { collision jni . bt quantized bvh float data quantized contiguous nodes ptr set ( swig c ptr , this , bt quantized bvh node data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { collision jni . bt quantized bvh float data quantized contiguous nodes ptr set ( swig c ptr , this , bt quantized bvh node data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

contiguous

<SENTENCE_START> { collision jni . bt quantized bvh float data quantized contiguous nodes ptr set ( swig c ptr , this , bt quantized bvh node data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 100.0%)

<SENTENCE_START> { collision jni . bt quantized bvh float data quantized contiguous nodes ptr set ( swig c ptr , this , bt quantized bvh node data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

nodes

<SENTENCE_START> { collision jni . bt quantized bvh float data quantized contiguous nodes ptr set ( swig c ptr , this , bt quantized bvh node data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 98.5%)

<SENTENCE_START> { collision jni . bt quantized bvh float data quantized contiguous nodes ptr set ( swig c ptr , this , bt quantized bvh node data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

ptr

<SENTENCE_START> { collision jni . bt quantized bvh float data quantized contiguous nodes ptr set ( swig c ptr , this , bt quantized bvh node data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 39.1%)

<SENTENCE_START> { collision jni . bt quantized bvh float data quantized contiguous nodes ptr set ( swig c ptr , this , bt quantized bvh node data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { collision jni . bt quantized bvh float data quantized contiguous nodes ptr set ( swig c ptr , this , bt quantized bvh node data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 21.6%)

<SENTENCE_START> { collision jni . bt quantized bvh float data quantized contiguous nodes ptr set ( swig c ptr , this , bt quantized bvh node data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>


Original Name get,quantized,contiguous,nodes,ptr

get

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data quantized contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt quantized bvh node data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 3.3%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data quantized contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt quantized bvh node data ( c ptr , false ) ; } <SENTENCE_END/>

quantized

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data quantized contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt quantized bvh node data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data quantized contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt quantized bvh node data ( c ptr , false ) ; } <SENTENCE_END/>

contiguous

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data quantized contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt quantized bvh node data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data quantized contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt quantized bvh node data ( c ptr , false ) ; } <SENTENCE_END/>

nodes

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data quantized contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt quantized bvh node data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 97.9%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data quantized contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt quantized bvh node data ( c ptr , false ) ; } <SENTENCE_END/>

ptr

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data quantized contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt quantized bvh node data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 34.4%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data quantized contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt quantized bvh node data ( c ptr , false ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data quantized contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt quantized bvh node data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 18.7%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data quantized contiguous nodes ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt quantized bvh node data ( c ptr , false ) ; } <SENTENCE_END/>


Original Name set,sub,tree,info,ptr

set

<SENTENCE_START> { collision jni . bt quantized bvh float data sub tree info ptr set ( swig c ptr , this , bt bvh subtree info data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 2.5%)

<SENTENCE_START> { collision jni . bt quantized bvh float data sub tree info ptr set ( swig c ptr , this , bt bvh subtree info data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

sub

<SENTENCE_START> { collision jni . bt quantized bvh float data sub tree info ptr set ( swig c ptr , this , bt bvh subtree info data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { collision jni . bt quantized bvh float data sub tree info ptr set ( swig c ptr , this , bt bvh subtree info data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

tree

<SENTENCE_START> { collision jni . bt quantized bvh float data sub tree info ptr set ( swig c ptr , this , bt bvh subtree info data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { collision jni . bt quantized bvh float data sub tree info ptr set ( swig c ptr , this , bt bvh subtree info data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

info

<SENTENCE_START> { collision jni . bt quantized bvh float data sub tree info ptr set ( swig c ptr , this , bt bvh subtree info data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 83.8%)

<SENTENCE_START> { collision jni . bt quantized bvh float data sub tree info ptr set ( swig c ptr , this , bt bvh subtree info data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

ptr

<SENTENCE_START> { collision jni . bt quantized bvh float data sub tree info ptr set ( swig c ptr , this , bt bvh subtree info data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 18.3%)

<SENTENCE_START> { collision jni . bt quantized bvh float data sub tree info ptr set ( swig c ptr , this , bt bvh subtree info data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { collision jni . bt quantized bvh float data sub tree info ptr set ( swig c ptr , this , bt bvh subtree info data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>

(Copy Probability: 18.7%)

<SENTENCE_START> { collision jni . bt quantized bvh float data sub tree info ptr set ( swig c ptr , this , bt bvh subtree info data . get c ptr ( value ) , value ) ; } <SENTENCE_END/>


Original Name get,sub,tree,info,ptr

get

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data sub tree info ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt bvh subtree info data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 2.4%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data sub tree info ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt bvh subtree info data ( c ptr , false ) ; } <SENTENCE_END/>

sub

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data sub tree info ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt bvh subtree info data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data sub tree info ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt bvh subtree info data ( c ptr , false ) ; } <SENTENCE_END/>

tree

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data sub tree info ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt bvh subtree info data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data sub tree info ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt bvh subtree info data ( c ptr , false ) ; } <SENTENCE_END/>

info

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data sub tree info ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt bvh subtree info data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 76.6%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data sub tree info ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt bvh subtree info data ( c ptr , false ) ; } <SENTENCE_END/>

ptr

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data sub tree info ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt bvh subtree info data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 15.3%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data sub tree info ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt bvh subtree info data ( c ptr , false ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data sub tree info ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt bvh subtree info data ( c ptr , false ) ; } <SENTENCE_END/>

(Copy Probability: 15.6%)

<SENTENCE_START> { long c ptr = collision jni . bt quantized bvh float data sub tree info ptr get ( swig c ptr , this ) ; return ( c ptr == 0 ) ? null : new bt bvh subtree info data ( c ptr , false ) ; } <SENTENCE_END/>


Original Name set,traversal,mode

set

<SENTENCE_START> { collision jni . bt quantized bvh float data traversal mode set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 2.2%)

<SENTENCE_START> { collision jni . bt quantized bvh float data traversal mode set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

traversal

<SENTENCE_START> { collision jni . bt quantized bvh float data traversal mode set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { collision jni . bt quantized bvh float data traversal mode set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

mode

<SENTENCE_START> { collision jni . bt quantized bvh float data traversal mode set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { collision jni . bt quantized bvh float data traversal mode set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { collision jni . bt quantized bvh float data traversal mode set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 2.8%)

<SENTENCE_START> { collision jni . bt quantized bvh float data traversal mode set ( swig c ptr , this , value ) ; } <SENTENCE_END/>


Original Name get,traversal,mode

get

<SENTENCE_START> { return collision jni . bt quantized bvh float data traversal mode get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 2.0%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data traversal mode get ( swig c ptr , this ) ; } <SENTENCE_END/>

traversal

<SENTENCE_START> { return collision jni . bt quantized bvh float data traversal mode get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data traversal mode get ( swig c ptr , this ) ; } <SENTENCE_END/>

mode

<SENTENCE_START> { return collision jni . bt quantized bvh float data traversal mode get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data traversal mode get ( swig c ptr , this ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return collision jni . bt quantized bvh float data traversal mode get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 3.4%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data traversal mode get ( swig c ptr , this ) ; } <SENTENCE_END/>


Original Name set,num,subtree,headers

set

<SENTENCE_START> { collision jni . bt quantized bvh float data num subtree headers set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 2.0%)

<SENTENCE_START> { collision jni . bt quantized bvh float data num subtree headers set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

num

<SENTENCE_START> { collision jni . bt quantized bvh float data num subtree headers set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { collision jni . bt quantized bvh float data num subtree headers set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

subtree

<SENTENCE_START> { collision jni . bt quantized bvh float data num subtree headers set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 100.0%)

<SENTENCE_START> { collision jni . bt quantized bvh float data num subtree headers set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

headers

<SENTENCE_START> { collision jni . bt quantized bvh float data num subtree headers set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 82.8%)

<SENTENCE_START> { collision jni . bt quantized bvh float data num subtree headers set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { collision jni . bt quantized bvh float data num subtree headers set ( swig c ptr , this , value ) ; } <SENTENCE_END/>

(Copy Probability: 7.5%)

<SENTENCE_START> { collision jni . bt quantized bvh float data num subtree headers set ( swig c ptr , this , value ) ; } <SENTENCE_END/>


Original Name get,num,subtree,headers

get

<SENTENCE_START> { return collision jni . bt quantized bvh float data num subtree headers get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 1.8%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data num subtree headers get ( swig c ptr , this ) ; } <SENTENCE_END/>

num

<SENTENCE_START> { return collision jni . bt quantized bvh float data num subtree headers get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data num subtree headers get ( swig c ptr , this ) ; } <SENTENCE_END/>

subtree

<SENTENCE_START> { return collision jni . bt quantized bvh float data num subtree headers get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 100.0%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data num subtree headers get ( swig c ptr , this ) ; } <SENTENCE_END/>

headers

<SENTENCE_START> { return collision jni . bt quantized bvh float data num subtree headers get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 82.6%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data num subtree headers get ( swig c ptr , this ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return collision jni . bt quantized bvh float data num subtree headers get ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 7.3%)

<SENTENCE_START> { return collision jni . bt quantized bvh float data num subtree headers get ( swig c ptr , this ) ; } <SENTENCE_END/>


Original Name new,global,id

new

<SENTENCE_START> { return current global id ++ ; } <SENTENCE_END/>

(Copy Probability: 0.7%)

<SENTENCE_START> { return current global id ++ ; } <SENTENCE_END/>

global

<SENTENCE_START> { return current global id ++ ; } <SENTENCE_END/>

(Copy Probability: 97.0%)

<SENTENCE_START> { return current global id ++ ; } <SENTENCE_END/>

id

<SENTENCE_START> { return current global id ++ ; } <SENTENCE_END/>

(Copy Probability: 96.1%)

<SENTENCE_START> { return current global id ++ ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return current global id ++ ; } <SENTENCE_END/>

(Copy Probability: 4.5%)

<SENTENCE_START> { return current global id ++ ; } <SENTENCE_END/>


Original Name get

get

<SENTENCE_START> { if ( instance == null ) instance = new texture region initializer ( ) ; return instance ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { if ( instance == null ) instance = new texture region initializer ( ) ; return instance ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( instance == null ) instance = new texture region initializer ( ) ; return instance ; } <SENTENCE_END/>

(Copy Probability: 12.3%)

<SENTENCE_START> { if ( instance == null ) instance = new texture region initializer ( ) ; return instance ; } <SENTENCE_END/>


Original Name get

get

<SENTENCE_START> { if ( instance == null ) instance = new color initializer ( ) ; return instance ; } <SENTENCE_END/>

(Copy Probability: 0.7%)

<SENTENCE_START> { if ( instance == null ) instance = new color initializer ( ) ; return instance ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( instance == null ) instance = new color initializer ( ) ; return instance ; } <SENTENCE_END/>

(Copy Probability: 4.0%)

<SENTENCE_START> { if ( instance == null ) instance = new color initializer ( ) ; return instance ; } <SENTENCE_END/>


Original Name get

get

<SENTENCE_START> { if ( instance == null ) instance = new scale initializer ( ) ; return instance ; } <SENTENCE_END/>

(Copy Probability: 0.7%)

<SENTENCE_START> { if ( instance == null ) instance = new scale initializer ( ) ; return instance ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( instance == null ) instance = new scale initializer ( ) ; return instance ; } <SENTENCE_END/>

(Copy Probability: 4.9%)

<SENTENCE_START> { if ( instance == null ) instance = new scale initializer ( ) ; return instance ; } <SENTENCE_END/>


Original Name get

get

<SENTENCE_START> { if ( instance == null ) instance = new rotation 2 d initializer ( ) ; return instance ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { if ( instance == null ) instance = new rotation 2 d initializer ( ) ; return instance ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( instance == null ) instance = new rotation 2 d initializer ( ) ; return instance ; } <SENTENCE_END/>

(Copy Probability: 6.5%)

<SENTENCE_START> { if ( instance == null ) instance = new rotation 2 d initializer ( ) ; return instance ; } <SENTENCE_END/>


Original Name get

get

<SENTENCE_START> { if ( instance == null ) instance = new rotation 3 d initializer ( ) ; return instance ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { if ( instance == null ) instance = new rotation 3 d initializer ( ) ; return instance ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( instance == null ) instance = new rotation 3 d initializer ( ) ; return instance ; } <SENTENCE_END/>

(Copy Probability: 5.6%)

<SENTENCE_START> { if ( instance == null ) instance = new rotation 3 d initializer ( ) ; return instance ; } <SENTENCE_END/>
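
The five parameterless get bodies above are the same lazily-initialized singleton accessor, differing only in the initializer class being constructed. A self-contained sketch of the pattern, with RotationInitializer as a hypothetical stand-in for those classes; note that, as written in the dump, the check-then-create sequence is not synchronized, so it is only safe when accessed from a single thread.

class RotationInitializer {
    private static RotationInitializer instance;

    // Lazily create the shared instance on first access; later calls reuse it.
    public static RotationInitializer get () {
        if (instance == null) instance = new RotationInitializer();
        return instance;
    }

    private RotationInitializer () { }
}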


Original Name new,id

new

<SENTENCE_START> { return current id ++ ; } <SENTENCE_END/>

(Copy Probability: 0.7%)

<SENTENCE_START> { return current id ++ ; } <SENTENCE_END/>

id

<SENTENCE_START> { return current id ++ ; } <SENTENCE_END/>

(Copy Probability: 95.0%)

<SENTENCE_START> { return current id ++ ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return current id ++ ; } <SENTENCE_END/>

(Copy Probability: 85.0%)

<SENTENCE_START> { return current id ++ ; } <SENTENCE_END/>


Original Name reset,ids

reset

<SENTENCE_START> { current id = current global id ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { current id = current global id ; } <SENTENCE_END/>

ids

<SENTENCE_START> { current id = current global id ; } <SENTENCE_END/>

(Copy Probability: 97.8%)

<SENTENCE_START> { current id = current global id ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { current id = current global id ; } <SENTENCE_END/>

(Copy Probability: 82.0%)

<SENTENCE_START> { current id = current global id ; } <SENTENCE_END/>
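
Read together with the new global id body further up, the new id and reset ids bodies suggest a two-level ID scheme: a class-wide counter hands out global IDs, a per-instance counter hands out local IDs, and reset ids rewinds the local counter to the current global value. A small self-contained sketch under that reading, with IdPool as a hypothetical host class name:

class IdPool {
    private static int currentGlobalId;  // shared across all instances
    private int currentId;               // per-instance counter

    public static int newGlobalId () { return currentGlobalId++; }

    public int newId () { return currentId++; }

    // Restart local numbering at the current global value.
    public void resetIds () { currentId = currentGlobalId; }
}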


Original Name get,c,ptr

get

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

(Copy Probability: 1.5%)

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

c

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

(Copy Probability: 99.7%)

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

ptr

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>

(Copy Probability: 3.7%)

<SENTENCE_START> { return ( obj == null ) ? 0 : obj . swig c ptr ; } <SENTENCE_END/>
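
get c ptr is the null-safe pointer extraction helper that the setter bodies in this dump call before crossing into JNI: a null proxy maps to the zero pointer, anything else exposes its stored swig c ptr. A minimal sketch, with NativeProxy standing in for the generated wrapper class:

class NativeProxy {
    protected long swigCPtr;

    protected NativeProxy (long cPtr) { swigCPtr = cPtr; }

    // Null-safe: a null proxy becomes the zero (null) native pointer.
    static long getCPtr (NativeProxy obj) {
        return (obj == null) ? 0 : obj.swigCPtr;
    }
}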


Original Name prepare,solve

prepare

<SENTENCE_START> { dynamics jni . bt constraint solver prepare solve ( swig c ptr , this , arg 0 , arg 1 ) ; } <SENTENCE_END/>

(Copy Probability: 33.0%)

<SENTENCE_START> { dynamics jni . bt constraint solver prepare solve ( swig c ptr , this , arg 0 , arg 1 ) ; } <SENTENCE_END/>

solve

<SENTENCE_START> { dynamics jni . bt constraint solver prepare solve ( swig c ptr , this , arg 0 , arg 1 ) ; } <SENTENCE_END/>

(Copy Probability: 98.4%)

<SENTENCE_START> { dynamics jni . bt constraint solver prepare solve ( swig c ptr , this , arg 0 , arg 1 ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { dynamics jni . bt constraint solver prepare solve ( swig c ptr , this , arg 0 , arg 1 ) ; } <SENTENCE_END/>

(Copy Probability: 2.1%)

<SENTENCE_START> { dynamics jni . bt constraint solver prepare solve ( swig c ptr , this , arg 0 , arg 1 ) ; } <SENTENCE_END/>


Original Name solve,group

solve

<SENTENCE_START> { return dynamics jni . bt constraint solver solve group ( swig c ptr , this , swigtype p p bt collision object . get c ptr ( bodies ) , num bodies , swigtype p p bt persistent manifold . get c ptr ( manifold ) , num manifolds , swigtype p p bt typed constraint . get c ptr ( constraints ) , num constraints , bt contact solver info . get c ptr ( info ) , info , bt i debug draw . get c ptr ( debug drawer ) , debug drawer , bt dispatcher . get c ptr ( dispatcher ) , dispatcher ) ; } <SENTENCE_END/>

(Copy Probability: 17.5%)

<SENTENCE_START> { return dynamics jni . bt constraint solver solve group ( swig c ptr , this , swigtype p p bt collision object . get c ptr ( bodies ) , num bodies , swigtype p p bt persistent manifold . get c ptr ( manifold ) , num manifolds , swigtype p p bt typed constraint . get c ptr ( constraints ) , num constraints , bt contact solver info . get c ptr ( info ) , info , bt i debug draw . get c ptr ( debug drawer ) , debug drawer , bt dispatcher . get c ptr ( dispatcher ) , dispatcher ) ; } <SENTENCE_END/>

group

<SENTENCE_START> { return dynamics jni . bt constraint solver solve group ( swig c ptr , this , swigtype p p bt collision object . get c ptr ( bodies ) , num bodies , swigtype p p bt persistent manifold . get c ptr ( manifold ) , num manifolds , swigtype p p bt typed constraint . get c ptr ( constraints ) , num constraints , bt contact solver info . get c ptr ( info ) , info , bt i debug draw . get c ptr ( debug drawer ) , debug drawer , bt dispatcher . get c ptr ( dispatcher ) , dispatcher ) ; } <SENTENCE_END/>

(Copy Probability: 96.4%)

<SENTENCE_START> { return dynamics jni . bt constraint solver solve group ( swig c ptr , this , swigtype p p bt collision object . get c ptr ( bodies ) , num bodies , swigtype p p bt persistent manifold . get c ptr ( manifold ) , num manifolds , swigtype p p bt typed constraint . get c ptr ( constraints ) , num constraints , bt contact solver info . get c ptr ( info ) , info , bt i debug draw . get c ptr ( debug drawer ) , debug drawer , bt dispatcher . get c ptr ( dispatcher ) , dispatcher ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return dynamics jni . bt constraint solver solve group ( swig c ptr , this , swigtype p p bt collision object . get c ptr ( bodies ) , num bodies , swigtype p p bt persistent manifold . get c ptr ( manifold ) , num manifolds , swigtype p p bt typed constraint . get c ptr ( constraints ) , num constraints , bt contact solver info . get c ptr ( info ) , info , bt i debug draw . get c ptr ( debug drawer ) , debug drawer , bt dispatcher . get c ptr ( dispatcher ) , dispatcher ) ; } <SENTENCE_END/>

(Copy Probability: 9.0%)

<SENTENCE_START> { return dynamics jni . bt constraint solver solve group ( swig c ptr , this , swigtype p p bt collision object . get c ptr ( bodies ) , num bodies , swigtype p p bt persistent manifold . get c ptr ( manifold ) , num manifolds , swigtype p p bt typed constraint . get c ptr ( constraints ) , num constraints , bt contact solver info . get c ptr ( info ) , info , bt i debug draw . get c ptr ( debug drawer ) , debug drawer , bt dispatcher . get c ptr ( dispatcher ) , dispatcher ) ; } <SENTENCE_END/>


Original Name all,solved

all

<SENTENCE_START> { dynamics jni . bt constraint solver all solved ( swig c ptr , this , bt contact solver info . get c ptr ( arg 0 ) , arg 0 , bt i debug draw . get c ptr ( arg 1 ) , arg 1 ) ; } <SENTENCE_END/>

(Copy Probability: 21.4%)

<SENTENCE_START> { dynamics jni . bt constraint solver all solved ( swig c ptr , this , bt contact solver info . get c ptr ( arg 0 ) , arg 0 , bt i debug draw . get c ptr ( arg 1 ) , arg 1 ) ; } <SENTENCE_END/>

solved

<SENTENCE_START> { dynamics jni . bt constraint solver all solved ( swig c ptr , this , bt contact solver info . get c ptr ( arg 0 ) , arg 0 , bt i debug draw . get c ptr ( arg 1 ) , arg 1 ) ; } <SENTENCE_END/>

(Copy Probability: 93.8%)

<SENTENCE_START> { dynamics jni . bt constraint solver all solved ( swig c ptr , this , bt contact solver info . get c ptr ( arg 0 ) , arg 0 , bt i debug draw . get c ptr ( arg 1 ) , arg 1 ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { dynamics jni . bt constraint solver all solved ( swig c ptr , this , bt contact solver info . get c ptr ( arg 0 ) , arg 0 , bt i debug draw . get c ptr ( arg 1 ) , arg 1 ) ; } <SENTENCE_END/>

(Copy Probability: 3.4%)

<SENTENCE_START> { dynamics jni . bt constraint solver all solved ( swig c ptr , this , bt contact solver info . get c ptr ( arg 0 ) , arg 0 , bt i debug draw . get c ptr ( arg 1 ) , arg 1 ) ; } <SENTENCE_END/>
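
prepare solve, solve group and all solved are pure delegation, and every wrapped argument is passed twice: once as its unwrapped native pointer via get c ptr, and once as the Java object itself. In SWIG-generated bindings the second, seemingly redundant argument conventionally keeps the Java proxy strongly reachable for the duration of the native call; that explanation is an assumption here, not something the dump states. A trimmed self-contained sketch of the calling convention, with FakeDynamicsJNI, ContactSolverInfo and DebugDrawer as hypothetical stand-ins:

class ContactSolverInfo {
    long swigCPtr;
    static long getCPtr (ContactSolverInfo obj) { return (obj == null) ? 0 : obj.swigCPtr; }
}

class DebugDrawer {
    long swigCPtr;
    static long getCPtr (DebugDrawer obj) { return (obj == null) ? 0 : obj.swigCPtr; }
}

// Hypothetical JNI bridge: one pointer plus one object parameter per wrapped argument.
class FakeDynamicsJNI {
    static void allSolved (long selfPtr, Object self,
                           long infoPtr, Object info,
                           long drawerPtr, Object drawer) { /* native */ }
}

class ConstraintSolverProxy {
    long swigCPtr;

    public void allSolved (ContactSolverInfo info, DebugDrawer drawer) {
        FakeDynamicsJNI.allSolved(swigCPtr, this,
            ContactSolverInfo.getCPtr(info), info,
            DebugDrawer.getCPtr(drawer), drawer);
    }
}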


Original Name reset

reset

<SENTENCE_START> { dynamics jni . bt constraint solver reset ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 4.1%)

<SENTENCE_START> { dynamics jni . bt constraint solver reset ( swig c ptr , this ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { dynamics jni . bt constraint solver reset ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 25.4%)

<SENTENCE_START> { dynamics jni . bt constraint solver reset ( swig c ptr , this ) ; } <SENTENCE_END/>


Original Name get,solver,type

get

<SENTENCE_START> { return dynamics jni . bt constraint solver get solver type ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 3.1%)

<SENTENCE_START> { return dynamics jni . bt constraint solver get solver type ( swig c ptr , this ) ; } <SENTENCE_END/>

solver

<SENTENCE_START> { return dynamics jni . bt constraint solver get solver type ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 99.8%)

<SENTENCE_START> { return dynamics jni . bt constraint solver get solver type ( swig c ptr , this ) ; } <SENTENCE_END/>

type

<SENTENCE_START> { return dynamics jni . bt constraint solver get solver type ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 99.9%)

<SENTENCE_START> { return dynamics jni . bt constraint solver get solver type ( swig c ptr , this ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return dynamics jni . bt constraint solver get solver type ( swig c ptr , this ) ; } <SENTENCE_END/>

(Copy Probability: 15.9%)

<SENTENCE_START> { return dynamics jni . bt constraint solver get solver type ( swig c ptr , this ) ; } <SENTENCE_END/>


Original Name format

format

<SENTENCE_START> { if ( message format != null ) { message format . apply pattern ( replace escape chars ( pattern ) ) ; return message format . %SELF% ( args ) ; } return simple format ( pattern , args ) ; } <SENTENCE_END/>

(Copy Probability: 3.0%)

<SENTENCE_START> { if ( message format != null ) { message format . apply pattern ( replace escape chars ( pattern ) ) ; return message format . %SELF% ( args ) ; } return simple format ( pattern , args ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( message format != null ) { message format . apply pattern ( replace escape chars ( pattern ) ) ; return message format . %SELF% ( args ) ; } return simple format ( pattern , args ) ; } <SENTENCE_END/>

(Copy Probability: 33.2%)

<SENTENCE_START> { if ( message format != null ) { message format . apply pattern ( replace escape chars ( pattern ) ) ; return message format . %SELF% ( args ) ; } return simple format ( pattern , args ) ; } <SENTENCE_END/>
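
The format body above chooses between two implementations: when a java.text.MessageFormat instance is available it rewrites the pattern with replace escape chars (next entry) and delegates to it, otherwise it falls back to the hand-rolled simple format parser at the end of this dump. A self-contained sketch of that dispatch, with the two helpers reduced to stubs so the snippet compiles on its own:

import java.text.MessageFormat;

class PatternFormatter {
    private final MessageFormat messageFormat;  // null on platforms without java.text support

    PatternFormatter (boolean useMessageFormat) {
        messageFormat = useMessageFormat ? new MessageFormat("") : null;
    }

    public String format (String pattern, Object... args) {
        if (messageFormat != null) {
            messageFormat.applyPattern(replaceEscapeChars(pattern));
            return messageFormat.format(args);
        }
        return simpleFormat(pattern, args);
    }

    // Stubs; the real bodies are reconstructed after their own entries below.
    private String replaceEscapeChars (String pattern) { return pattern; }
    private String simpleFormat (String pattern, Object... args) { return pattern; }
}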


Original Name replace,escape,chars

replace

<SENTENCE_START> { buffer . set length ( 0 ) ; boolean changed = false ; int len = pattern . length ( ) ; for ( int i = 0 ; i < len ; i ++ ) { char ch = pattern . char at ( i ) ; if ( ch == '|'' ) { changed = true ; buffer . append ( "''" ) ; } else if ( ch == '{' ) { int j = i + 1 ; while ( j < len && pattern . char at ( j ) == '{' ) j ++ ; int escaped = ( j - i ) / 2 ; if ( escaped > 0 ) { changed = true ; buffer . append ( '|'' ) ; do { buffer . append ( '{' ) ; } while ( ( -- escaped ) > 0 ) ; buffer . append ( '|'' ) ; } if ( ( j - i ) % 2 != 0 ) buffer . append ( '{' ) ; i = j - 1 ; } else { buffer . append ( ch ) ; } } return changed ? buffer . to string ( ) : pattern ; } <SENTENCE_END/>

(Copy Probability: 2.7%)

<SENTENCE_START> { buffer . set length ( 0 ) ; boolean changed = false ; int len = pattern . length ( ) ; for ( int i = 0 ; i < len ; i ++ ) { char ch = pattern . char at ( i ) ; if ( ch == '|'' ) { changed = true ; buffer . append ( "''" ) ; } else if ( ch == '{' ) { int j = i + 1 ; while ( j < len && pattern . char at ( j ) == '{' ) j ++ ; int escaped = ( j - i ) / 2 ; if ( escaped > 0 ) { changed = true ; buffer . append ( '|'' ) ; do { buffer . append ( '{' ) ; } while ( ( -- escaped ) > 0 ) ; buffer . append ( '|'' ) ; } if ( ( j - i ) % 2 != 0 ) buffer . append ( '{' ) ; i = j - 1 ; } else { buffer . append ( ch ) ; } } return changed ? buffer . to string ( ) : pattern ; } <SENTENCE_END/>

escape

<SENTENCE_START> { buffer . set length ( 0 ) ; boolean changed = false ; int len = pattern . length ( ) ; for ( int i = 0 ; i < len ; i ++ ) { char ch = pattern . char at ( i ) ; if ( ch == '|'' ) { changed = true ; buffer . append ( "''" ) ; } else if ( ch == '{' ) { int j = i + 1 ; while ( j < len && pattern . char at ( j ) == '{' ) j ++ ; int escaped = ( j - i ) / 2 ; if ( escaped > 0 ) { changed = true ; buffer . append ( '|'' ) ; do { buffer . append ( '{' ) ; } while ( ( -- escaped ) > 0 ) ; buffer . append ( '|'' ) ; } if ( ( j - i ) % 2 != 0 ) buffer . append ( '{' ) ; i = j - 1 ; } else { buffer . append ( ch ) ; } } return changed ? buffer . to string ( ) : pattern ; } <SENTENCE_END/>

(Copy Probability: 4.3%)

<SENTENCE_START> { buffer . set length ( 0 ) ; boolean changed = false ; int len = pattern . length ( ) ; for ( int i = 0 ; i < len ; i ++ ) { char ch = pattern . char at ( i ) ; if ( ch == '|'' ) { changed = true ; buffer . append ( "''" ) ; } else if ( ch == '{' ) { int j = i + 1 ; while ( j < len && pattern . char at ( j ) == '{' ) j ++ ; int escaped = ( j - i ) / 2 ; if ( escaped > 0 ) { changed = true ; buffer . append ( '|'' ) ; do { buffer . append ( '{' ) ; } while ( ( -- escaped ) > 0 ) ; buffer . append ( '|'' ) ; } if ( ( j - i ) % 2 != 0 ) buffer . append ( '{' ) ; i = j - 1 ; } else { buffer . append ( ch ) ; } } return changed ? buffer . to string ( ) : pattern ; } <SENTENCE_END/>

chars

<SENTENCE_START> { buffer . set length ( 0 ) ; boolean changed = false ; int len = pattern . length ( ) ; for ( int i = 0 ; i < len ; i ++ ) { char ch = pattern . char at ( i ) ; if ( ch == '|'' ) { changed = true ; buffer . append ( "''" ) ; } else if ( ch == '{' ) { int j = i + 1 ; while ( j < len && pattern . char at ( j ) == '{' ) j ++ ; int escaped = ( j - i ) / 2 ; if ( escaped > 0 ) { changed = true ; buffer . append ( '|'' ) ; do { buffer . append ( '{' ) ; } while ( ( -- escaped ) > 0 ) ; buffer . append ( '|'' ) ; } if ( ( j - i ) % 2 != 0 ) buffer . append ( '{' ) ; i = j - 1 ; } else { buffer . append ( ch ) ; } } return changed ? buffer . to string ( ) : pattern ; } <SENTENCE_END/>

(Copy Probability: 3.2%)

<SENTENCE_START> { buffer . set length ( 0 ) ; boolean changed = false ; int len = pattern . length ( ) ; for ( int i = 0 ; i < len ; i ++ ) { char ch = pattern . char at ( i ) ; if ( ch == '|'' ) { changed = true ; buffer . append ( "''" ) ; } else if ( ch == '{' ) { int j = i + 1 ; while ( j < len && pattern . char at ( j ) == '{' ) j ++ ; int escaped = ( j - i ) / 2 ; if ( escaped > 0 ) { changed = true ; buffer . append ( '|'' ) ; do { buffer . append ( '{' ) ; } while ( ( -- escaped ) > 0 ) ; buffer . append ( '|'' ) ; } if ( ( j - i ) % 2 != 0 ) buffer . append ( '{' ) ; i = j - 1 ; } else { buffer . append ( ch ) ; } } return changed ? buffer . to string ( ) : pattern ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { buffer . set length ( 0 ) ; boolean changed = false ; int len = pattern . length ( ) ; for ( int i = 0 ; i < len ; i ++ ) { char ch = pattern . char at ( i ) ; if ( ch == '|'' ) { changed = true ; buffer . append ( "''" ) ; } else if ( ch == '{' ) { int j = i + 1 ; while ( j < len && pattern . char at ( j ) == '{' ) j ++ ; int escaped = ( j - i ) / 2 ; if ( escaped > 0 ) { changed = true ; buffer . append ( '|'' ) ; do { buffer . append ( '{' ) ; } while ( ( -- escaped ) > 0 ) ; buffer . append ( '|'' ) ; } if ( ( j - i ) % 2 != 0 ) buffer . append ( '{' ) ; i = j - 1 ; } else { buffer . append ( ch ) ; } } return changed ? buffer . to string ( ) : pattern ; } <SENTENCE_END/>

(Copy Probability: 3.0%)

<SENTENCE_START> { buffer . set length ( 0 ) ; boolean changed = false ; int len = pattern . length ( ) ; for ( int i = 0 ; i < len ; i ++ ) { char ch = pattern . char at ( i ) ; if ( ch == '|'' ) { changed = true ; buffer . append ( "''" ) ; } else if ( ch == '{' ) { int j = i + 1 ; while ( j < len && pattern . char at ( j ) == '{' ) j ++ ; int escaped = ( j - i ) / 2 ; if ( escaped > 0 ) { changed = true ; buffer . append ( '|'' ) ; do { buffer . append ( '{' ) ; } while ( ( -- escaped ) > 0 ) ; buffer . append ( '|'' ) ; } if ( ( j - i ) % 2 != 0 ) buffer . append ( '{' ) ; i = j - 1 ; } else { buffer . append ( ch ) ; } } return changed ? buffer . to string ( ) : pattern ; } <SENTENCE_END/>
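
De-tokenized, the body above escapes a pattern for MessageFormat: literal single quotes are doubled, and runs of left curly braces are halved and wrapped in quotes so that only a genuinely unpaired brace survives as a placeholder opener. The token rendered here as '|'' most plausibly corresponds to the single-quote character literal; that reading is an assumption. A self-contained reconstruction under it, using a local StringBuilder where the original appears to reuse a shared buffer field:

class EscapeHelper {
    // Rewrite 'pattern' so MessageFormat treats quotes and doubled braces literally.
    static String replaceEscapeChars (String pattern) {
        StringBuilder buffer = new StringBuilder();
        boolean changed = false;
        int len = pattern.length();
        for (int i = 0; i < len; i++) {
            char ch = pattern.charAt(i);
            if (ch == '\'') {
                changed = true;
                buffer.append("''");                      // double a literal quote
            } else if (ch == '{') {
                int j = i + 1;
                while (j < len && pattern.charAt(j) == '{') j++;
                int escaped = (j - i) / 2;                // each "{{" becomes a quoted "{"
                if (escaped > 0) {
                    changed = true;
                    buffer.append('\'');
                    do {
                        buffer.append('{');
                    } while ((--escaped) > 0);
                    buffer.append('\'');
                }
                if ((j - i) % 2 != 0) buffer.append('{'); // odd brace stays as a placeholder opener
                i = j - 1;
            } else {
                buffer.append(ch);
            }
        }
        return changed ? buffer.toString() : pattern;
    }
}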


Original Name simple,format

simple

<SENTENCE_START> { buffer . set length ( 0 ) ; boolean changed = false ; int placeholder = - 1 ; int pattern length = pattern . length ( ) ; for ( int i = 0 ; i < pattern length ; ++ i ) { char ch = pattern . char at ( i ) ; if ( placeholder < 0 ) { if ( ch == '{' ) { changed = true ; if ( i + 1 < pattern length && pattern . char at ( i + 1 ) == '{' ) { buffer . append ( ch ) ; ++ i ; } else { placeholder = 0 ; } } else { buffer . append ( ch ) ; } } else { if ( ch == '}' ) { if ( placeholder >= args . length ) throw new illegal argument exception ( "Argument index out of bounds: " + placeholder ) ; if ( pattern . char at ( i - 1 ) == '{' ) throw new illegal argument exception ( "Missing argument index after a left curly brace" ) ; if ( args [ placeholder ] == null ) buffer . append ( "null" ) ; else buffer . append ( args [ placeholder ] . to string ( ) ) ; placeholder = - 1 ; } else { if ( ch < '0' || ch > '9' ) throw new illegal argument exception ( "Unexpected '" + ch + "' while parsing argument index" ) ; placeholder = placeholder * 10 + ( ch - '0' ) ; } } } if ( placeholder >= 0 ) throw new illegal argument exception ( "Unmatched braces in the pattern." ) ; return changed ? buffer . to string ( ) : pattern ; } <SENTENCE_END/>

(Copy Probability: 3.3%)

<SENTENCE_START> { buffer . set length ( 0 ) ; boolean changed = false ; int placeholder = - 1 ; int pattern length = pattern . length ( ) ; for ( int i = 0 ; i < pattern length ; ++ i ) { char ch = pattern . char at ( i ) ; if ( placeholder < 0 ) { if ( ch == '{' ) { changed = true ; if ( i + 1 < pattern length && pattern . char at ( i + 1 ) == '{' ) { buffer . append ( ch ) ; ++ i ; } else { placeholder = 0 ; } } else { buffer . append ( ch ) ; } } else { if ( ch == '}' ) { if ( placeholder >= args . length ) throw new illegal argument exception ( "Argument index out of bounds: " + placeholder ) ; if ( pattern . char at ( i - 1 ) == '{' ) throw new illegal argument exception ( "Missing argument index after a left curly brace" ) ; if ( args [ placeholder ] == null ) buffer . append ( "null" ) ; else buffer . append ( args [ placeholder ] . to string ( ) ) ; placeholder = - 1 ; } else { if ( ch < '0' || ch > '9' ) throw new illegal argument exception ( "Unexpected '" + ch + "' while parsing argument index" ) ; placeholder = placeholder * 10 + ( ch - '0' ) ; } } } if ( placeholder >= 0 ) throw new illegal argument exception ( "Unmatched braces in the pattern." ) ; return changed ? buffer . to string ( ) : pattern ; } <SENTENCE_END/>

format

<SENTENCE_START> { buffer . set length ( 0 ) ; boolean changed = false ; int placeholder = - 1 ; int pattern length = pattern . length ( ) ; for ( int i = 0 ; i < pattern length ; ++ i ) { char ch = pattern . char at ( i ) ; if ( placeholder < 0 ) { if ( ch == '{' ) { changed = true ; if ( i + 1 < pattern length && pattern . char at ( i + 1 ) == '{' ) { buffer . append ( ch ) ; ++ i ; } else { placeholder = 0 ; } } else { buffer . append ( ch ) ; } } else { if ( ch == '}' ) { if ( placeholder >= args . length ) throw new illegal argument exception ( "Argument index out of bounds: " + placeholder ) ; if ( pattern . char at ( i - 1 ) == '{' ) throw new illegal argument exception ( "Missing argument index after a left curly brace" ) ; if ( args [ placeholder ] == null ) buffer . append ( "null" ) ; else buffer . append ( args [ placeholder ] . to string ( ) ) ; placeholder = - 1 ; } else { if ( ch < '0' || ch > '9' ) throw new illegal argument exception ( "Unexpected '" + ch + "' while parsing argument index" ) ; placeholder = placeholder * 10 + ( ch - '0' ) ; } } } if ( placeholder >= 0 ) throw new illegal argument exception ( "Unmatched braces in the pattern." ) ; return changed ? buffer . to string ( ) : pattern ; } <SENTENCE_END/>

(Copy Probability: 6.4%)

<SENTENCE_START> { buffer . set length ( 0 ) ; boolean changed = false ; int placeholder = - 1 ; int pattern length = pattern . length ( ) ; for ( int i = 0 ; i < pattern length ; ++ i ) { char ch = pattern . char at ( i ) ; if ( placeholder < 0 ) { if ( ch == '{' ) { changed = true ; if ( i + 1 < pattern length && pattern . char at ( i + 1 ) == '{' ) { buffer . append ( ch ) ; ++ i ; } else { placeholder = 0 ; } } else { buffer . append ( ch ) ; } } else { if ( ch == '}' ) { if ( placeholder >= args . length ) throw new illegal argument exception ( "Argument index out of bounds: " + placeholder ) ; if ( pattern . char at ( i - 1 ) == '{' ) throw new illegal argument exception ( "Missing argument index after a left curly brace" ) ; if ( args [ placeholder ] == null ) buffer . append ( "null" ) ; else buffer . append ( args [ placeholder ] . to string ( ) ) ; placeholder = - 1 ; } else { if ( ch < '0' || ch > '9' ) throw new illegal argument exception ( "Unexpected '" + ch + "' while parsing argument index" ) ; placeholder = placeholder * 10 + ( ch - '0' ) ; } } } if ( placeholder >= 0 ) throw new illegal argument exception ( "Unmatched braces in the pattern." ) ; return changed ? buffer . to string ( ) : pattern ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { buffer . set length ( 0 ) ; boolean changed = false ; int placeholder = - 1 ; int pattern length = pattern . length ( ) ; for ( int i = 0 ; i < pattern length ; ++ i ) { char ch = pattern . char at ( i ) ; if ( placeholder < 0 ) { if ( ch == '{' ) { changed = true ; if ( i + 1 < pattern length && pattern . char at ( i + 1 ) == '{' ) { buffer . append ( ch ) ; ++ i ; } else { placeholder = 0 ; } } else { buffer . append ( ch ) ; } } else { if ( ch == '}' ) { if ( placeholder >= args . length ) throw new illegal argument exception ( "Argument index out of bounds: " + placeholder ) ; if ( pattern . char at ( i - 1 ) == '{' ) throw new illegal argument exception ( "Missing argument index after a left curly brace" ) ; if ( args [ placeholder ] == null ) buffer . append ( "null" ) ; else buffer . append ( args [ placeholder ] . to string ( ) ) ; placeholder = - 1 ; } else { if ( ch < '0' || ch > '9' ) throw new illegal argument exception ( "Unexpected '" + ch + "' while parsing argument index" ) ; placeholder = placeholder * 10 + ( ch - '0' ) ; } } } if ( placeholder >= 0 ) throw new illegal argument exception ( "Unmatched braces in the pattern." ) ; return changed ? buffer . to string ( ) : pattern ; } <SENTENCE_END/>

(Copy Probability: 3.4%)

<SENTENCE_START> { buffer . set length ( 0 ) ; boolean changed = false ; int placeholder = - 1 ; int pattern length = pattern . length ( ) ; for ( int i = 0 ; i < pattern length ; ++ i ) { char ch = pattern . char at ( i ) ; if ( placeholder < 0 ) { if ( ch == '{' ) { changed = true ; if ( i + 1 < pattern length && pattern . char at ( i + 1 ) == '{' ) { buffer . append ( ch ) ; ++ i ; } else { placeholder = 0 ; } } else { buffer . append ( ch ) ; } } else { if ( ch == '}' ) { if ( placeholder >= args . length ) throw new illegal argument exception ( "Argument index out of bounds: " + placeholder ) ; if ( pattern . char at ( i - 1 ) == '{' ) throw new illegal argument exception ( "Missing argument index after a left curly brace" ) ; if ( args [ placeholder ] == null ) buffer . append ( "null" ) ; else buffer . append ( args [ placeholder ] . to string ( ) ) ; placeholder = - 1 ; } else { if ( ch < '0' || ch > '9' ) throw new illegal argument exception ( "Unexpected '" + ch + "' while parsing argument index" ) ; placeholder = placeholder * 10 + ( ch - '0' ) ; } } } if ( placeholder >= 0 ) throw new illegal argument exception ( "Unmatched braces in the pattern." ) ; return changed ? buffer . to string ( ) : pattern ; } <SENTENCE_END/>
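Note: the simple,format body above is a lightweight formatter that replaces {n} placeholders with the corresponding argument and treats {{ as a literal brace. A readable de-tokenization follows; the buffer field and the Object[] args parameter are inferred from the tokens.

    // Sketch of the simple format method reconstructed from the tokens above; names are assumptions.
    String simpleFormat (String pattern, Object... args) {
        buffer.setLength(0);
        boolean changed = false;
        int placeholder = -1;                 // -1 means we are outside a {n} placeholder
        int patternLength = pattern.length();
        for (int i = 0; i < patternLength; ++i) {
            char ch = pattern.charAt(i);
            if (placeholder < 0) {
                if (ch == '{') {
                    changed = true;
                    if (i + 1 < patternLength && pattern.charAt(i + 1) == '{') {
                        buffer.append(ch);    // "{{" is an escaped literal brace
                        ++i;
                    } else {
                        placeholder = 0;      // start parsing the argument index
                    }
                } else {
                    buffer.append(ch);
                }
            } else {
                if (ch == '}') {
                    if (placeholder >= args.length)
                        throw new IllegalArgumentException("Argument index out of bounds: " + placeholder);
                    if (pattern.charAt(i - 1) == '{')
                        throw new IllegalArgumentException("Missing argument index after a left curly brace");
                    buffer.append(args[placeholder] == null ? "null" : args[placeholder].toString());
                    placeholder = -1;
                } else {
                    if (ch < '0' || ch > '9')
                        throw new IllegalArgumentException("Unexpected '" + ch + "' while parsing argument index");
                    placeholder = placeholder * 10 + (ch - '0');
                }
            }
        }
        if (placeholder >= 0) throw new IllegalArgumentException("Unmatched braces in the pattern.");
        return changed ? buffer.toString() : pattern;
    }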


Original Name ensure,capacity

ensure

<SENTENCE_START> { if ( buffer . remaining ( ) > float buffer . length ( ) ) { float buffer = typed arrays . create float 32 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 2.3%)

<SENTENCE_START> { if ( buffer . remaining ( ) > float buffer . length ( ) ) { float buffer = typed arrays . create float 32 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>

capacity

<SENTENCE_START> { if ( buffer . remaining ( ) > float buffer . length ( ) ) { float buffer = typed arrays . create float 32 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 2.6%)

<SENTENCE_START> { if ( buffer . remaining ( ) > float buffer . length ( ) ) { float buffer = typed arrays . create float 32 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( buffer . remaining ( ) > float buffer . length ( ) ) { float buffer = typed arrays . create float 32 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 2.1%)

<SENTENCE_START> { if ( buffer . remaining ( ) > float buffer . length ( ) ) { float buffer = typed arrays . create float 32 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>


Original Name ensure,capacity

ensure

<SENTENCE_START> { if ( buffer . remaining ( ) > short buffer . length ( ) ) { short buffer = typed arrays . create int 16 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 2.6%)

<SENTENCE_START> { if ( buffer . remaining ( ) > short buffer . length ( ) ) { short buffer = typed arrays . create int 16 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>

capacity

<SENTENCE_START> { if ( buffer . remaining ( ) > short buffer . length ( ) ) { short buffer = typed arrays . create int 16 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 4.4%)

<SENTENCE_START> { if ( buffer . remaining ( ) > short buffer . length ( ) ) { short buffer = typed arrays . create int 16 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( buffer . remaining ( ) > short buffer . length ( ) ) { short buffer = typed arrays . create int 16 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 2.1%)

<SENTENCE_START> { if ( buffer . remaining ( ) > short buffer . length ( ) ) { short buffer = typed arrays . create int 16 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>


Original Name ensure,capacity

ensure

<SENTENCE_START> { if ( buffer . remaining ( ) > int buffer . length ( ) ) { int buffer = typed arrays . create int 32 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 2.5%)

<SENTENCE_START> { if ( buffer . remaining ( ) > int buffer . length ( ) ) { int buffer = typed arrays . create int 32 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>

capacity

<SENTENCE_START> { if ( buffer . remaining ( ) > int buffer . length ( ) ) { int buffer = typed arrays . create int 32 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 2.3%)

<SENTENCE_START> { if ( buffer . remaining ( ) > int buffer . length ( ) ) { int buffer = typed arrays . create int 32 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( buffer . remaining ( ) > int buffer . length ( ) ) { int buffer = typed arrays . create int 32 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 2.4%)

<SENTENCE_START> { if ( buffer . remaining ( ) > int buffer . length ( ) ) { int buffer = typed arrays . create int 32 array ( buffer . remaining ( ) ) ; } } <SENTENCE_END/>
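Note: the three ensure,capacity bodies above (float, short, int) all follow the same pattern: grow a cached GWT typed array whenever the incoming NIO buffer has more remaining elements than the cache can hold. A sketch of the float variant follows; the static field and its initial size are assumptions inferred from the tokens, not confirmed by the dump.

    // Assumed cached typed-array view; grown on demand, never shrunk. Initial size is illustrative.
    static Float32Array floatBuffer = TypedArrays.createFloat32Array(1024);

    static void ensureCapacity (FloatBuffer buffer) {
        if (buffer.remaining() > floatBuffer.length()) {
            floatBuffer = TypedArrays.createFloat32Array(buffer.remaining());
        }
    }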


Original Name copy

copy

<SENTENCE_START> { if ( gwt . is prod mode ( ) ) { return ( ( float 32 array ) ( ( has array buffer view ) buffer ) . get typed array ( ) ) . subarray ( buffer . position ( ) , buffer . remaining ( ) ) ; } else { ensure capacity ( buffer ) ; for ( int i = buffer . position ( ) , j = 0 ; i < buffer . limit ( ) ; i ++ , j ++ ) { float buffer . set ( j , buffer . get ( i ) ) ; } return float buffer . subarray ( 0 , buffer . remaining ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 2.9%)

<SENTENCE_START> { if ( gwt . is prod mode ( ) ) { return ( ( float 32 array ) ( ( has array buffer view ) buffer ) . get typed array ( ) ) . subarray ( buffer . position ( ) , buffer . remaining ( ) ) ; } else { ensure capacity ( buffer ) ; for ( int i = buffer . position ( ) , j = 0 ; i < buffer . limit ( ) ; i ++ , j ++ ) { float buffer . set ( j , buffer . get ( i ) ) ; } return float buffer . subarray ( 0 , buffer . remaining ( ) ) ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( gwt . is prod mode ( ) ) { return ( ( float 32 array ) ( ( has array buffer view ) buffer ) . get typed array ( ) ) . subarray ( buffer . position ( ) , buffer . remaining ( ) ) ; } else { ensure capacity ( buffer ) ; for ( int i = buffer . position ( ) , j = 0 ; i < buffer . limit ( ) ; i ++ , j ++ ) { float buffer . set ( j , buffer . get ( i ) ) ; } return float buffer . subarray ( 0 , buffer . remaining ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 6.3%)

<SENTENCE_START> { if ( gwt . is prod mode ( ) ) { return ( ( float 32 array ) ( ( has array buffer view ) buffer ) . get typed array ( ) ) . subarray ( buffer . position ( ) , buffer . remaining ( ) ) ; } else { ensure capacity ( buffer ) ; for ( int i = buffer . position ( ) , j = 0 ; i < buffer . limit ( ) ; i ++ , j ++ ) { float buffer . set ( j , buffer . get ( i ) ) ; } return float buffer . subarray ( 0 , buffer . remaining ( ) ) ; } } <SENTENCE_END/>


Original Name copy

copy

<SENTENCE_START> { if ( gwt . is prod mode ( ) ) { return ( ( int 16 array ) ( ( has array buffer view ) buffer ) . get typed array ( ) ) . subarray ( buffer . position ( ) , buffer . remaining ( ) ) ; } else { ensure capacity ( buffer ) ; for ( int i = buffer . position ( ) , j = 0 ; i < buffer . limit ( ) ; i ++ , j ++ ) { short buffer . set ( j , buffer . get ( i ) ) ; } return short buffer . subarray ( 0 , buffer . remaining ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 2.9%)

<SENTENCE_START> { if ( gwt . is prod mode ( ) ) { return ( ( int 16 array ) ( ( has array buffer view ) buffer ) . get typed array ( ) ) . subarray ( buffer . position ( ) , buffer . remaining ( ) ) ; } else { ensure capacity ( buffer ) ; for ( int i = buffer . position ( ) , j = 0 ; i < buffer . limit ( ) ; i ++ , j ++ ) { short buffer . set ( j , buffer . get ( i ) ) ; } return short buffer . subarray ( 0 , buffer . remaining ( ) ) ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( gwt . is prod mode ( ) ) { return ( ( int 16 array ) ( ( has array buffer view ) buffer ) . get typed array ( ) ) . subarray ( buffer . position ( ) , buffer . remaining ( ) ) ; } else { ensure capacity ( buffer ) ; for ( int i = buffer . position ( ) , j = 0 ; i < buffer . limit ( ) ; i ++ , j ++ ) { short buffer . set ( j , buffer . get ( i ) ) ; } return short buffer . subarray ( 0 , buffer . remaining ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 6.1%)

<SENTENCE_START> { if ( gwt . is prod mode ( ) ) { return ( ( int 16 array ) ( ( has array buffer view ) buffer ) . get typed array ( ) ) . subarray ( buffer . position ( ) , buffer . remaining ( ) ) ; } else { ensure capacity ( buffer ) ; for ( int i = buffer . position ( ) , j = 0 ; i < buffer . limit ( ) ; i ++ , j ++ ) { short buffer . set ( j , buffer . get ( i ) ) ; } return short buffer . subarray ( 0 , buffer . remaining ( ) ) ; } } <SENTENCE_END/>


Original Name copy

copy

<SENTENCE_START> { if ( gwt . is prod mode ( ) ) { return ( ( int 32 array ) ( ( has array buffer view ) buffer ) . get typed array ( ) ) . subarray ( buffer . position ( ) , buffer . remaining ( ) ) ; } else { ensure capacity ( buffer ) ; for ( int i = buffer . position ( ) , j = 0 ; i < buffer . limit ( ) ; i ++ , j ++ ) { int buffer . set ( j , buffer . get ( i ) ) ; } return int buffer . subarray ( 0 , buffer . remaining ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 2.9%)

<SENTENCE_START> { if ( gwt . is prod mode ( ) ) { return ( ( int 32 array ) ( ( has array buffer view ) buffer ) . get typed array ( ) ) . subarray ( buffer . position ( ) , buffer . remaining ( ) ) ; } else { ensure capacity ( buffer ) ; for ( int i = buffer . position ( ) , j = 0 ; i < buffer . limit ( ) ; i ++ , j ++ ) { int buffer . set ( j , buffer . get ( i ) ) ; } return int buffer . subarray ( 0 , buffer . remaining ( ) ) ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( gwt . is prod mode ( ) ) { return ( ( int 32 array ) ( ( has array buffer view ) buffer ) . get typed array ( ) ) . subarray ( buffer . position ( ) , buffer . remaining ( ) ) ; } else { ensure capacity ( buffer ) ; for ( int i = buffer . position ( ) , j = 0 ; i < buffer . limit ( ) ; i ++ , j ++ ) { int buffer . set ( j , buffer . get ( i ) ) ; } return int buffer . subarray ( 0 , buffer . remaining ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 7.4%)

<SENTENCE_START> { if ( gwt . is prod mode ( ) ) { return ( ( int 32 array ) ( ( has array buffer view ) buffer ) . get typed array ( ) ) . subarray ( buffer . position ( ) , buffer . remaining ( ) ) ; } else { ensure capacity ( buffer ) ; for ( int i = buffer . position ( ) , j = 0 ; i < buffer . limit ( ) ; i ++ , j ++ ) { int buffer . set ( j , buffer . get ( i ) ) ; } return int buffer . subarray ( 0 , buffer . remaining ( ) ) ; } } <SENTENCE_END/>
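Note: each copy variant above takes a fast path in GWT production mode, returning a zero-copy subarray view of the buffer's backing typed array, and otherwise falls back to an element-by-element copy into the cached array grown by ensureCapacity. A sketch of the float variant, under the same assumptions as the ensureCapacity sketch (HasArrayBufferView is taken to be the backend's buffer-emulation interface named in the tokens):

    static Float32Array copy (FloatBuffer buffer) {
        if (GWT.isProdMode()) {
            // Direct buffers expose their backing typed array; take a view without copying.
            return ((Float32Array)((HasArrayBufferView)buffer).getTypedArray())
                .subarray(buffer.position(), buffer.remaining());
        } else {
            ensureCapacity(buffer);
            for (int i = buffer.position(), j = 0; i < buffer.limit(); i++, j++) {
                floatBuffer.set(j, buffer.get(i));
            }
            return floatBuffer.subarray(0, buffer.remaining());
        }
    }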


Original Name allocate,uniform,location,id

allocate

<SENTENCE_START> { map < integer , web gl uniform location > prog uniforms = uniforms . get ( program ) ; if ( prog uniforms == null ) { prog uniforms = new hash map < integer , web gl uniform location > ( ) ; uniforms . put ( program , prog uniforms ) ; } int id = next uniform id ++ ; prog uniforms . put ( id , location ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 2.1%)

<SENTENCE_START> { map < integer , web gl uniform location > prog uniforms = uniforms . get ( program ) ; if ( prog uniforms == null ) { prog uniforms = new hash map < integer , web gl uniform location > ( ) ; uniforms . put ( program , prog uniforms ) ; } int id = next uniform id ++ ; prog uniforms . put ( id , location ) ; return id ; } <SENTENCE_END/>

uniform

<SENTENCE_START> { map < integer , web gl uniform location > prog uniforms = uniforms . get ( program ) ; if ( prog uniforms == null ) { prog uniforms = new hash map < integer , web gl uniform location > ( ) ; uniforms . put ( program , prog uniforms ) ; } int id = next uniform id ++ ; prog uniforms . put ( id , location ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 4.8%)

<SENTENCE_START> { map < integer , web gl uniform location > prog uniforms = uniforms . get ( program ) ; if ( prog uniforms == null ) { prog uniforms = new hash map < integer , web gl uniform location > ( ) ; uniforms . put ( program , prog uniforms ) ; } int id = next uniform id ++ ; prog uniforms . put ( id , location ) ; return id ; } <SENTENCE_END/>

location

<SENTENCE_START> { map < integer , web gl uniform location > prog uniforms = uniforms . get ( program ) ; if ( prog uniforms == null ) { prog uniforms = new hash map < integer , web gl uniform location > ( ) ; uniforms . put ( program , prog uniforms ) ; } int id = next uniform id ++ ; prog uniforms . put ( id , location ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 6.5%)

<SENTENCE_START> { map < integer , web gl uniform location > prog uniforms = uniforms . get ( program ) ; if ( prog uniforms == null ) { prog uniforms = new hash map < integer , web gl uniform location > ( ) ; uniforms . put ( program , prog uniforms ) ; } int id = next uniform id ++ ; prog uniforms . put ( id , location ) ; return id ; } <SENTENCE_END/>

id

<SENTENCE_START> { map < integer , web gl uniform location > prog uniforms = uniforms . get ( program ) ; if ( prog uniforms == null ) { prog uniforms = new hash map < integer , web gl uniform location > ( ) ; uniforms . put ( program , prog uniforms ) ; } int id = next uniform id ++ ; prog uniforms . put ( id , location ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 6.0%)

<SENTENCE_START> { map < integer , web gl uniform location > prog uniforms = uniforms . get ( program ) ; if ( prog uniforms == null ) { prog uniforms = new hash map < integer , web gl uniform location > ( ) ; uniforms . put ( program , prog uniforms ) ; } int id = next uniform id ++ ; prog uniforms . put ( id , location ) ; return id ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { map < integer , web gl uniform location > prog uniforms = uniforms . get ( program ) ; if ( prog uniforms == null ) { prog uniforms = new hash map < integer , web gl uniform location > ( ) ; uniforms . put ( program , prog uniforms ) ; } int id = next uniform id ++ ; prog uniforms . put ( id , location ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 3.1%)

<SENTENCE_START> { map < integer , web gl uniform location > prog uniforms = uniforms . get ( program ) ; if ( prog uniforms == null ) { prog uniforms = new hash map < integer , web gl uniform location > ( ) ; uniforms . put ( program , prog uniforms ) ; } int id = next uniform id ++ ; prog uniforms . put ( id , location ) ; return id ; } <SENTENCE_END/>
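Note: the body above lazily creates a per-program map from int ids to WebGLUniformLocation objects, because WebGL hands back opaque location objects while a GL20-style API expects int handles. A readable sketch follows; the field declarations and the int program parameter type are inferred from the tokens.

    // program -> (id -> location); callers receive ids instead of the opaque WebGL objects.
    Map<Integer, Map<Integer, WebGLUniformLocation>> uniforms = new HashMap<Integer, Map<Integer, WebGLUniformLocation>>();
    int nextUniformId = 1;

    public int allocateUniformLocationId (int program, WebGLUniformLocation location) {
        Map<Integer, WebGLUniformLocation> progUniforms = uniforms.get(program);
        if (progUniforms == null) {
            progUniforms = new HashMap<Integer, WebGLUniformLocation>();
            uniforms.put(program, progUniforms);
        }
        int id = nextUniformId++;
        progUniforms.put(id, location);
        return id;
    }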


Original Name get,uniform,location

get

<SENTENCE_START> { return uniforms . get ( curr program ) . get ( location ) ; } <SENTENCE_END/>

(Copy Probability: 0.7%)

<SENTENCE_START> { return uniforms . get ( curr program ) . get ( location ) ; } <SENTENCE_END/>

uniform

<SENTENCE_START> { return uniforms . get ( curr program ) . get ( location ) ; } <SENTENCE_END/>

(Copy Probability: 1.4%)

<SENTENCE_START> { return uniforms . get ( curr program ) . get ( location ) ; } <SENTENCE_END/>

location

<SENTENCE_START> { return uniforms . get ( curr program ) . get ( location ) ; } <SENTENCE_END/>

(Copy Probability: 0.9%)

<SENTENCE_START> { return uniforms . get ( curr program ) . get ( location ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return uniforms . get ( curr program ) . get ( location ) ; } <SENTENCE_END/>

(Copy Probability: 1.4%)

<SENTENCE_START> { return uniforms . get ( curr program ) . get ( location ) ; } <SENTENCE_END/>


Original Name allocate,shader,id

allocate

<SENTENCE_START> { int id = next shader id ++ ; shaders . put ( id , shader ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 1.4%)

<SENTENCE_START> { int id = next shader id ++ ; shaders . put ( id , shader ) ; return id ; } <SENTENCE_END/>

shader

<SENTENCE_START> { int id = next shader id ++ ; shaders . put ( id , shader ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 89.6%)

<SENTENCE_START> { int id = next shader id ++ ; shaders . put ( id , shader ) ; return id ; } <SENTENCE_END/>

id

<SENTENCE_START> { int id = next shader id ++ ; shaders . put ( id , shader ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 17.8%)

<SENTENCE_START> { int id = next shader id ++ ; shaders . put ( id , shader ) ; return id ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int id = next shader id ++ ; shaders . put ( id , shader ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { int id = next shader id ++ ; shaders . put ( id , shader ) ; return id ; } <SENTENCE_END/>


Original Name deallocate,shader,id

deallocate

<SENTENCE_START> { shaders . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { shaders . remove ( id ) ; } <SENTENCE_END/>

shader

<SENTENCE_START> { shaders . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 2.6%)

<SENTENCE_START> { shaders . remove ( id ) ; } <SENTENCE_END/>

id

<SENTENCE_START> { shaders . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 0.9%)

<SENTENCE_START> { shaders . remove ( id ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { shaders . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { shaders . remove ( id ) ; } <SENTENCE_END/>


Original Name allocate,program,id

allocate

<SENTENCE_START> { int id = next program id ++ ; programs . put ( id , program ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 1.3%)

<SENTENCE_START> { int id = next program id ++ ; programs . put ( id , program ) ; return id ; } <SENTENCE_END/>

program

<SENTENCE_START> { int id = next program id ++ ; programs . put ( id , program ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 54.2%)

<SENTENCE_START> { int id = next program id ++ ; programs . put ( id , program ) ; return id ; } <SENTENCE_END/>

id

<SENTENCE_START> { int id = next program id ++ ; programs . put ( id , program ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 8.3%)

<SENTENCE_START> { int id = next program id ++ ; programs . put ( id , program ) ; return id ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int id = next program id ++ ; programs . put ( id , program ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 2.9%)

<SENTENCE_START> { int id = next program id ++ ; programs . put ( id , program ) ; return id ; } <SENTENCE_END/>


Original Name deallocate,program,id

deallocate

<SENTENCE_START> { uniforms . remove ( id ) ; programs . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 0.9%)

<SENTENCE_START> { uniforms . remove ( id ) ; programs . remove ( id ) ; } <SENTENCE_END/>

program

<SENTENCE_START> { uniforms . remove ( id ) ; programs . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 2.9%)

<SENTENCE_START> { uniforms . remove ( id ) ; programs . remove ( id ) ; } <SENTENCE_END/>

id

<SENTENCE_START> { uniforms . remove ( id ) ; programs . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 0.9%)

<SENTENCE_START> { uniforms . remove ( id ) ; programs . remove ( id ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { uniforms . remove ( id ) ; programs . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { uniforms . remove ( id ) ; programs . remove ( id ) ; } <SENTENCE_END/>


Original Name allocate,buffer,id

allocate

<SENTENCE_START> { int id = next buffer id ++ ; buffers . put ( id , buffer ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 1.3%)

<SENTENCE_START> { int id = next buffer id ++ ; buffers . put ( id , buffer ) ; return id ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { int id = next buffer id ++ ; buffers . put ( id , buffer ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 78.0%)

<SENTENCE_START> { int id = next buffer id ++ ; buffers . put ( id , buffer ) ; return id ; } <SENTENCE_END/>

id

<SENTENCE_START> { int id = next buffer id ++ ; buffers . put ( id , buffer ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 24.5%)

<SENTENCE_START> { int id = next buffer id ++ ; buffers . put ( id , buffer ) ; return id ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int id = next buffer id ++ ; buffers . put ( id , buffer ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 2.7%)

<SENTENCE_START> { int id = next buffer id ++ ; buffers . put ( id , buffer ) ; return id ; } <SENTENCE_END/>


Original Name deallocate,buffer,id

deallocate

<SENTENCE_START> { buffers . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { buffers . remove ( id ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { buffers . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 2.6%)

<SENTENCE_START> { buffers . remove ( id ) ; } <SENTENCE_END/>

id

<SENTENCE_START> { buffers . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 1.1%)

<SENTENCE_START> { buffers . remove ( id ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { buffers . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 1.1%)

<SENTENCE_START> { buffers . remove ( id ) ; } <SENTENCE_END/>


Original Name allocate,frame,buffer,id

allocate

<SENTENCE_START> { int id = next buffer id ++ ; frame buffers . put ( id , frame buffer ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 1.7%)

<SENTENCE_START> { int id = next buffer id ++ ; frame buffers . put ( id , frame buffer ) ; return id ; } <SENTENCE_END/>

frame

<SENTENCE_START> { int id = next buffer id ++ ; frame buffers . put ( id , frame buffer ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 65.9%)

<SENTENCE_START> { int id = next buffer id ++ ; frame buffers . put ( id , frame buffer ) ; return id ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { int id = next buffer id ++ ; frame buffers . put ( id , frame buffer ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 54.5%)

<SENTENCE_START> { int id = next buffer id ++ ; frame buffers . put ( id , frame buffer ) ; return id ; } <SENTENCE_END/>

id

<SENTENCE_START> { int id = next buffer id ++ ; frame buffers . put ( id , frame buffer ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 5.1%)

<SENTENCE_START> { int id = next buffer id ++ ; frame buffers . put ( id , frame buffer ) ; return id ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int id = next buffer id ++ ; frame buffers . put ( id , frame buffer ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 4.6%)

<SENTENCE_START> { int id = next buffer id ++ ; frame buffers . put ( id , frame buffer ) ; return id ; } <SENTENCE_END/>


Original Name deallocate,frame,buffer,id

deallocate

<SENTENCE_START> { frame buffers . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { frame buffers . remove ( id ) ; } <SENTENCE_END/>

frame

<SENTENCE_START> { frame buffers . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 2.7%)

<SENTENCE_START> { frame buffers . remove ( id ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { frame buffers . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 2.6%)

<SENTENCE_START> { frame buffers . remove ( id ) ; } <SENTENCE_END/>

id

<SENTENCE_START> { frame buffers . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 2.0%)

<SENTENCE_START> { frame buffers . remove ( id ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { frame buffers . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 3.6%)

<SENTENCE_START> { frame buffers . remove ( id ) ; } <SENTENCE_END/>


Original Name allocate,render,buffer,id

allocate

<SENTENCE_START> { int id = next render buffer id ++ ; render buffers . put ( id , render buffer ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 1.7%)

<SENTENCE_START> { int id = next render buffer id ++ ; render buffers . put ( id , render buffer ) ; return id ; } <SENTENCE_END/>

render

<SENTENCE_START> { int id = next render buffer id ++ ; render buffers . put ( id , render buffer ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 80.0%)

<SENTENCE_START> { int id = next render buffer id ++ ; render buffers . put ( id , render buffer ) ; return id ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { int id = next render buffer id ++ ; render buffers . put ( id , render buffer ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 41.0%)

<SENTENCE_START> { int id = next render buffer id ++ ; render buffers . put ( id , render buffer ) ; return id ; } <SENTENCE_END/>

id

<SENTENCE_START> { int id = next render buffer id ++ ; render buffers . put ( id , render buffer ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 7.4%)

<SENTENCE_START> { int id = next render buffer id ++ ; render buffers . put ( id , render buffer ) ; return id ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int id = next render buffer id ++ ; render buffers . put ( id , render buffer ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 3.8%)

<SENTENCE_START> { int id = next render buffer id ++ ; render buffers . put ( id , render buffer ) ; return id ; } <SENTENCE_END/>


Original Name deallocate,render,buffer,id

deallocate

<SENTENCE_START> { render buffers . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { render buffers . remove ( id ) ; } <SENTENCE_END/>

render

<SENTENCE_START> { render buffers . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 3.0%)

<SENTENCE_START> { render buffers . remove ( id ) ; } <SENTENCE_END/>

buffer

<SENTENCE_START> { render buffers . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 1.7%)

<SENTENCE_START> { render buffers . remove ( id ) ; } <SENTENCE_END/>

id

<SENTENCE_START> { render buffers . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 1.2%)

<SENTENCE_START> { render buffers . remove ( id ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { render buffers . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 3.8%)

<SENTENCE_START> { render buffers . remove ( id ) ; } <SENTENCE_END/>


Original Name allocate,texture,id

allocate

<SENTENCE_START> { int id = next texture id ++ ; textures . put ( id , texture ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 1.7%)

<SENTENCE_START> { int id = next texture id ++ ; textures . put ( id , texture ) ; return id ; } <SENTENCE_END/>

texture

<SENTENCE_START> { int id = next texture id ++ ; textures . put ( id , texture ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 99.0%)

<SENTENCE_START> { int id = next texture id ++ ; textures . put ( id , texture ) ; return id ; } <SENTENCE_END/>

id

<SENTENCE_START> { int id = next texture id ++ ; textures . put ( id , texture ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 40.7%)

<SENTENCE_START> { int id = next texture id ++ ; textures . put ( id , texture ) ; return id ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { int id = next texture id ++ ; textures . put ( id , texture ) ; return id ; } <SENTENCE_END/>

(Copy Probability: 3.2%)

<SENTENCE_START> { int id = next texture id ++ ; textures . put ( id , texture ) ; return id ; } <SENTENCE_END/>


Original Name deallocate,texture,id

deallocate

<SENTENCE_START> { textures . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 0.6%)

<SENTENCE_START> { textures . remove ( id ) ; } <SENTENCE_END/>

texture

<SENTENCE_START> { textures . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 5.0%)

<SENTENCE_START> { textures . remove ( id ) ; } <SENTENCE_END/>

id

<SENTENCE_START> { textures . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 1.5%)

<SENTENCE_START> { textures . remove ( id ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { textures . remove ( id ) ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { textures . remove ( id ) ; } <SENTENCE_END/>
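Note: the shader, program, buffer, frame buffer, render buffer, and texture groups above all instantiate one handle-allocation pattern: a monotonically increasing int id is stored in a per-type map of WebGL objects and removed again on deallocation. A sketch of the texture instance of the pattern follows; field types are inferred from the tokens, and the real code repeats this shape once per object type rather than sharing a generic helper.

    // One map and one counter per WebGL object type; shown here for textures only.
    Map<Integer, WebGLTexture> textures = new HashMap<Integer, WebGLTexture>();
    int nextTextureId = 1;

    public int allocateTextureId (WebGLTexture texture) {
        int id = nextTextureId++;
        textures.put(id, texture);
        return id;
    }

    public void deallocateTextureId (int id) {
        textures.remove(id);
    }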


Original Name new,object

new

<SENTENCE_START> { return new key event ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { return new key event ( ) ; } <SENTENCE_END/>

object

<SENTENCE_START> { return new key event ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.4%)

<SENTENCE_START> { return new key event ( ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return new key event ( ) ; } <SENTENCE_END/>

(Copy Probability: 1.0%)

<SENTENCE_START> { return new key event ( ) ; } <SENTENCE_END/>


Original Name new,object

new

<SENTENCE_START> { return new touch event ( ) ; } <SENTENCE_END/>

(Copy Probability: 0.5%)

<SENTENCE_START> { return new touch event ( ) ; } <SENTENCE_END/>

object

<SENTENCE_START> { return new touch event ( ) ; } <SENTENCE_END/>

(Copy Probability: 4.2%)

<SENTENCE_START> { return new touch event ( ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return new touch event ( ) ; } <SENTENCE_END/>

(Copy Probability: 4.8%)

<SENTENCE_START> { return new touch event ( ) ; } <SENTENCE_END/>
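Note: the two new,object bodies above are object-pool factory overrides: each pool constructs its event type on demand and recycles instances through free(), as the processEvents body further below shows. A minimal sketch, assuming libgdx-style Pool fields whose names (usedKeyEvents, usedTouchEvents) and capacities are inferred rather than confirmed:

    // Assumed pool declarations matching the newObject bodies above; sizes are illustrative.
    Pool<KeyEvent> usedKeyEvents = new Pool<KeyEvent>(16, 1000) {
        protected KeyEvent newObject () {
            return new KeyEvent();
        }
    };

    Pool<TouchEvent> usedTouchEvents = new Pool<TouchEvent>(16, 1000) {
        protected TouchEvent newObject () {
            return new TouchEvent();
        }
    };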


Original Name set,listeners

set

<SENTENCE_START> { if ( this . canvas != null ) { canvas . remove mouse listener ( this ) ; canvas . remove mouse motion listener ( this ) ; canvas . remove mouse wheel listener ( this ) ; canvas . remove key listener ( this ) ; } canvas . add mouse listener ( this ) ; canvas . add mouse motion listener ( this ) ; canvas . add mouse wheel listener ( this ) ; canvas . add key listener ( this ) ; canvas . set focus traversal keys enabled ( false ) ; this . canvas = canvas ; } <SENTENCE_END/>

(Copy Probability: 4.1%)

<SENTENCE_START> { if ( this . canvas != null ) { canvas . remove mouse listener ( this ) ; canvas . remove mouse motion listener ( this ) ; canvas . remove mouse wheel listener ( this ) ; canvas . remove key listener ( this ) ; } canvas . add mouse listener ( this ) ; canvas . add mouse motion listener ( this ) ; canvas . add mouse wheel listener ( this ) ; canvas . add key listener ( this ) ; canvas . set focus traversal keys enabled ( false ) ; this . canvas = canvas ; } <SENTENCE_END/>

listeners

<SENTENCE_START> { if ( this . canvas != null ) { canvas . remove mouse listener ( this ) ; canvas . remove mouse motion listener ( this ) ; canvas . remove mouse wheel listener ( this ) ; canvas . remove key listener ( this ) ; } canvas . add mouse listener ( this ) ; canvas . add mouse motion listener ( this ) ; canvas . add mouse wheel listener ( this ) ; canvas . add key listener ( this ) ; canvas . set focus traversal keys enabled ( false ) ; this . canvas = canvas ; } <SENTENCE_END/>

(Copy Probability: 42.6%)

<SENTENCE_START> { if ( this . canvas != null ) { canvas . remove mouse listener ( this ) ; canvas . remove mouse motion listener ( this ) ; canvas . remove mouse wheel listener ( this ) ; canvas . remove key listener ( this ) ; } canvas . add mouse listener ( this ) ; canvas . add mouse motion listener ( this ) ; canvas . add mouse wheel listener ( this ) ; canvas . add key listener ( this ) ; canvas . set focus traversal keys enabled ( false ) ; this . canvas = canvas ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( this . canvas != null ) { canvas . remove mouse listener ( this ) ; canvas . remove mouse motion listener ( this ) ; canvas . remove mouse wheel listener ( this ) ; canvas . remove key listener ( this ) ; } canvas . add mouse listener ( this ) ; canvas . add mouse motion listener ( this ) ; canvas . add mouse wheel listener ( this ) ; canvas . add key listener ( this ) ; canvas . set focus traversal keys enabled ( false ) ; this . canvas = canvas ; } <SENTENCE_END/>

(Copy Probability: 34.6%)

<SENTENCE_START> { if ( this . canvas != null ) { canvas . remove mouse listener ( this ) ; canvas . remove mouse motion listener ( this ) ; canvas . remove mouse wheel listener ( this ) ; canvas . remove key listener ( this ) ; } canvas . add mouse listener ( this ) ; canvas . add mouse motion listener ( this ) ; canvas . add mouse wheel listener ( this ) ; canvas . add key listener ( this ) ; canvas . set focus traversal keys enabled ( false ) ; this . canvas = canvas ; } <SENTENCE_END/>
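Note: set,listeners above attaches the input handler to an AWT canvas: if a canvas was already tracked, the existing mouse, motion, wheel, and key listeners are removed first, then all four are registered on the new canvas and focus-traversal keys are disabled so Tab reaches the key listener. A readable sketch, faithful to the tokens (including the detail that the removals target the passed-in canvas):

    public void setListeners (Canvas canvas) {
        if (this.canvas != null) {
            // Detach the listeners; the tokens remove them from the parameter, as shown above.
            canvas.removeMouseListener(this);
            canvas.removeMouseMotionListener(this);
            canvas.removeMouseWheelListener(this);
            canvas.removeKeyListener(this);
        }
        canvas.addMouseListener(this);
        canvas.addMouseMotionListener(this);
        canvas.addMouseWheelListener(this);
        canvas.addKeyListener(this);
        canvas.setFocusTraversalKeysEnabled(false);  // let Tab and friends reach the key listener
        this.canvas = canvas;
    }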


Original Name get,text,input

get

<SENTENCE_START> { swing utilities . invoke later ( new runnable ( ) { @ override public void run ( ) { j panel panel = new j panel ( new flow layout ( ) ) ; j panel text panel = new j panel ( ) { public boolean is optimized drawing enabled ( ) { return false ; } } ; text panel . set layout ( new overlay layout ( text panel ) ) ; panel . add ( text panel ) ; final j text field text field = new j text field ( 20 ) ; text field . set text ( text ) ; text field . set alignment x ( 0.0f ) ; text panel . add ( text field ) ; final j label placeholder label = new j label ( hint ) ; placeholder label . set foreground ( color . gray ) ; placeholder label . set alignment x ( 0.0f ) ; text panel . add ( placeholder label , 0 ) ; text field . get document ( ) . add document listener ( new document listener ( ) { @ override public void remove update ( document event arg 0 ) { this . updated ( ) ; } @ override public void insert update ( document event arg 0 ) { this . updated ( ) ; } @ override public void changed update ( document event arg 0 ) { this . updated ( ) ; } private void updated ( ) { if ( text field . get text ( ) . length ( ) == 0 ) placeholder label . set visible ( true ) ; else placeholder label . set visible ( false ) ; } } ) ; j option pane pane = new j option pane ( panel , j option pane . question message , j option pane . ok cancel option , null , null , null ) ; pane . set initial value ( null ) ; pane . set component orientation ( j option pane . get root frame ( ) . get component orientation ( ) ) ; border border = text field . get border ( ) ; placeholder label . set border ( new empty border ( border . get border insets ( text field ) ) ) ; j dialog dialog = pane . create dialog ( null , title ) ; pane . select initial value ( ) ; dialog . add window focus listener ( new window focus listener ( ) { @ override public void window lost focus ( window event arg 0 ) { } @ override public void window gained focus ( window event arg 0 ) { text field . request focus in window ( ) ; } } ) ; dialog . set visible ( true ) ; dialog . dispose ( ) ; object selected value = pane . get value ( ) ; if ( selected value != null && ( selected value instanceof integer ) && ( ( integer ) selected value ) . int value ( ) == j option pane . ok option ) { listener . input ( text field . get text ( ) ) ; } else { listener . canceled ( ) ; } } } ) ; } <SENTENCE_END/>

(Copy Probability: 4.9%)

<SENTENCE_START> { swing utilities . invoke later ( new runnable ( ) { @ override public void run ( ) { j panel panel = new j panel ( new flow layout ( ) ) ; j panel text panel = new j panel ( ) { public boolean is optimized drawing enabled ( ) { return false ; } } ; text panel . set layout ( new overlay layout ( text panel ) ) ; panel . add ( text panel ) ; final j text field text field = new j text field ( 20 ) ; text field . set text ( text ) ; text field . set alignment x ( 0.0f ) ; text panel . add ( text field ) ; final j label placeholder label = new j label ( hint ) ; placeholder label . set foreground ( color . gray ) ; placeholder label . set alignment x ( 0.0f ) ; text panel . add ( placeholder label , 0 ) ; text field . get document ( ) . add document listener ( new document listener ( ) { @ override public void remove update ( document event arg 0 ) { this . updated ( ) ; } @ override public void insert update ( document event arg 0 ) { this . updated ( ) ; } @ override public void changed update ( document event arg 0 ) { this . updated ( ) ; } private void updated ( ) { if ( text field . get text ( ) . length ( ) == 0 ) placeholder label . set visible ( true ) ; else placeholder label . set visible ( false ) ; } } ) ; j option pane pane = new j option pane ( panel , j option pane . question message , j option pane . ok cancel option , null , null , null ) ; pane . set initial value ( null ) ; pane . set component orientation ( j option pane . get root frame ( ) . get component orientation ( ) ) ; border border = text field . get border ( ) ; placeholder label . set border ( new empty border ( border . get border insets ( text field ) ) ) ; j dialog dialog = pane . create dialog ( null , title ) ; pane . select initial value ( ) ; dialog . add window focus listener ( new window focus listener ( ) { @ override public void window lost focus ( window event arg 0 ) { } @ override public void window gained focus ( window event arg 0 ) { text field . request focus in window ( ) ; } } ) ; dialog . set visible ( true ) ; dialog . dispose ( ) ; object selected value = pane . get value ( ) ; if ( selected value != null && ( selected value instanceof integer ) && ( ( integer ) selected value ) . int value ( ) == j option pane . ok option ) { listener . input ( text field . get text ( ) ) ; } else { listener . canceled ( ) ; } } } ) ; } <SENTENCE_END/>

text

<SENTENCE_START> { swing utilities . invoke later ( new runnable ( ) { @ override public void run ( ) { j panel panel = new j panel ( new flow layout ( ) ) ; j panel text panel = new j panel ( ) { public boolean is optimized drawing enabled ( ) { return false ; } } ; text panel . set layout ( new overlay layout ( text panel ) ) ; panel . add ( text panel ) ; final j text field text field = new j text field ( 20 ) ; text field . set text ( text ) ; text field . set alignment x ( 0.0f ) ; text panel . add ( text field ) ; final j label placeholder label = new j label ( hint ) ; placeholder label . set foreground ( color . gray ) ; placeholder label . set alignment x ( 0.0f ) ; text panel . add ( placeholder label , 0 ) ; text field . get document ( ) . add document listener ( new document listener ( ) { @ override public void remove update ( document event arg 0 ) { this . updated ( ) ; } @ override public void insert update ( document event arg 0 ) { this . updated ( ) ; } @ override public void changed update ( document event arg 0 ) { this . updated ( ) ; } private void updated ( ) { if ( text field . get text ( ) . length ( ) == 0 ) placeholder label . set visible ( true ) ; else placeholder label . set visible ( false ) ; } } ) ; j option pane pane = new j option pane ( panel , j option pane . question message , j option pane . ok cancel option , null , null , null ) ; pane . set initial value ( null ) ; pane . set component orientation ( j option pane . get root frame ( ) . get component orientation ( ) ) ; border border = text field . get border ( ) ; placeholder label . set border ( new empty border ( border . get border insets ( text field ) ) ) ; j dialog dialog = pane . create dialog ( null , title ) ; pane . select initial value ( ) ; dialog . add window focus listener ( new window focus listener ( ) { @ override public void window lost focus ( window event arg 0 ) { } @ override public void window gained focus ( window event arg 0 ) { text field . request focus in window ( ) ; } } ) ; dialog . set visible ( true ) ; dialog . dispose ( ) ; object selected value = pane . get value ( ) ; if ( selected value != null && ( selected value instanceof integer ) && ( ( integer ) selected value ) . int value ( ) == j option pane . ok option ) { listener . input ( text field . get text ( ) ) ; } else { listener . canceled ( ) ; } } } ) ; } <SENTENCE_END/>

(Copy Probability: 13.6%)

<SENTENCE_START> { swing utilities . invoke later ( new runnable ( ) { @ override public void run ( ) { j panel panel = new j panel ( new flow layout ( ) ) ; j panel text panel = new j panel ( ) { public boolean is optimized drawing enabled ( ) { return false ; } } ; text panel . set layout ( new overlay layout ( text panel ) ) ; panel . add ( text panel ) ; final j text field text field = new j text field ( 20 ) ; text field . set text ( text ) ; text field . set alignment x ( 0.0f ) ; text panel . add ( text field ) ; final j label placeholder label = new j label ( hint ) ; placeholder label . set foreground ( color . gray ) ; placeholder label . set alignment x ( 0.0f ) ; text panel . add ( placeholder label , 0 ) ; text field . get document ( ) . add document listener ( new document listener ( ) { @ override public void remove update ( document event arg 0 ) { this . updated ( ) ; } @ override public void insert update ( document event arg 0 ) { this . updated ( ) ; } @ override public void changed update ( document event arg 0 ) { this . updated ( ) ; } private void updated ( ) { if ( text field . get text ( ) . length ( ) == 0 ) placeholder label . set visible ( true ) ; else placeholder label . set visible ( false ) ; } } ) ; j option pane pane = new j option pane ( panel , j option pane . question message , j option pane . ok cancel option , null , null , null ) ; pane . set initial value ( null ) ; pane . set component orientation ( j option pane . get root frame ( ) . get component orientation ( ) ) ; border border = text field . get border ( ) ; placeholder label . set border ( new empty border ( border . get border insets ( text field ) ) ) ; j dialog dialog = pane . create dialog ( null , title ) ; pane . select initial value ( ) ; dialog . add window focus listener ( new window focus listener ( ) { @ override public void window lost focus ( window event arg 0 ) { } @ override public void window gained focus ( window event arg 0 ) { text field . request focus in window ( ) ; } } ) ; dialog . set visible ( true ) ; dialog . dispose ( ) ; object selected value = pane . get value ( ) ; if ( selected value != null && ( selected value instanceof integer ) && ( ( integer ) selected value ) . int value ( ) == j option pane . ok option ) { listener . input ( text field . get text ( ) ) ; } else { listener . canceled ( ) ; } } } ) ; } <SENTENCE_END/>

input

<SENTENCE_START> { swing utilities . invoke later ( new runnable ( ) { @ override public void run ( ) { j panel panel = new j panel ( new flow layout ( ) ) ; j panel text panel = new j panel ( ) { public boolean is optimized drawing enabled ( ) { return false ; } } ; text panel . set layout ( new overlay layout ( text panel ) ) ; panel . add ( text panel ) ; final j text field text field = new j text field ( 20 ) ; text field . set text ( text ) ; text field . set alignment x ( 0.0f ) ; text panel . add ( text field ) ; final j label placeholder label = new j label ( hint ) ; placeholder label . set foreground ( color . gray ) ; placeholder label . set alignment x ( 0.0f ) ; text panel . add ( placeholder label , 0 ) ; text field . get document ( ) . add document listener ( new document listener ( ) { @ override public void remove update ( document event arg 0 ) { this . updated ( ) ; } @ override public void insert update ( document event arg 0 ) { this . updated ( ) ; } @ override public void changed update ( document event arg 0 ) { this . updated ( ) ; } private void updated ( ) { if ( text field . get text ( ) . length ( ) == 0 ) placeholder label . set visible ( true ) ; else placeholder label . set visible ( false ) ; } } ) ; j option pane pane = new j option pane ( panel , j option pane . question message , j option pane . ok cancel option , null , null , null ) ; pane . set initial value ( null ) ; pane . set component orientation ( j option pane . get root frame ( ) . get component orientation ( ) ) ; border border = text field . get border ( ) ; placeholder label . set border ( new empty border ( border . get border insets ( text field ) ) ) ; j dialog dialog = pane . create dialog ( null , title ) ; pane . select initial value ( ) ; dialog . add window focus listener ( new window focus listener ( ) { @ override public void window lost focus ( window event arg 0 ) { } @ override public void window gained focus ( window event arg 0 ) { text field . request focus in window ( ) ; } } ) ; dialog . set visible ( true ) ; dialog . dispose ( ) ; object selected value = pane . get value ( ) ; if ( selected value != null && ( selected value instanceof integer ) && ( ( integer ) selected value ) . int value ( ) == j option pane . ok option ) { listener . input ( text field . get text ( ) ) ; } else { listener . canceled ( ) ; } } } ) ; } <SENTENCE_END/>

(Copy Probability: 6.9%)

<SENTENCE_START> { swing utilities . invoke later ( new runnable ( ) { @ override public void run ( ) { j panel panel = new j panel ( new flow layout ( ) ) ; j panel text panel = new j panel ( ) { public boolean is optimized drawing enabled ( ) { return false ; } } ; text panel . set layout ( new overlay layout ( text panel ) ) ; panel . add ( text panel ) ; final j text field text field = new j text field ( 20 ) ; text field . set text ( text ) ; text field . set alignment x ( 0.0f ) ; text panel . add ( text field ) ; final j label placeholder label = new j label ( hint ) ; placeholder label . set foreground ( color . gray ) ; placeholder label . set alignment x ( 0.0f ) ; text panel . add ( placeholder label , 0 ) ; text field . get document ( ) . add document listener ( new document listener ( ) { @ override public void remove update ( document event arg 0 ) { this . updated ( ) ; } @ override public void insert update ( document event arg 0 ) { this . updated ( ) ; } @ override public void changed update ( document event arg 0 ) { this . updated ( ) ; } private void updated ( ) { if ( text field . get text ( ) . length ( ) == 0 ) placeholder label . set visible ( true ) ; else placeholder label . set visible ( false ) ; } } ) ; j option pane pane = new j option pane ( panel , j option pane . question message , j option pane . ok cancel option , null , null , null ) ; pane . set initial value ( null ) ; pane . set component orientation ( j option pane . get root frame ( ) . get component orientation ( ) ) ; border border = text field . get border ( ) ; placeholder label . set border ( new empty border ( border . get border insets ( text field ) ) ) ; j dialog dialog = pane . create dialog ( null , title ) ; pane . select initial value ( ) ; dialog . add window focus listener ( new window focus listener ( ) { @ override public void window lost focus ( window event arg 0 ) { } @ override public void window gained focus ( window event arg 0 ) { text field . request focus in window ( ) ; } } ) ; dialog . set visible ( true ) ; dialog . dispose ( ) ; object selected value = pane . get value ( ) ; if ( selected value != null && ( selected value instanceof integer ) && ( ( integer ) selected value ) . int value ( ) == j option pane . ok option ) { listener . input ( text field . get text ( ) ) ; } else { listener . canceled ( ) ; } } } ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { swing utilities . invoke later ( new runnable ( ) { @ override public void run ( ) { j panel panel = new j panel ( new flow layout ( ) ) ; j panel text panel = new j panel ( ) { public boolean is optimized drawing enabled ( ) { return false ; } } ; text panel . set layout ( new overlay layout ( text panel ) ) ; panel . add ( text panel ) ; final j text field text field = new j text field ( 20 ) ; text field . set text ( text ) ; text field . set alignment x ( 0.0f ) ; text panel . add ( text field ) ; final j label placeholder label = new j label ( hint ) ; placeholder label . set foreground ( color . gray ) ; placeholder label . set alignment x ( 0.0f ) ; text panel . add ( placeholder label , 0 ) ; text field . get document ( ) . add document listener ( new document listener ( ) { @ override public void remove update ( document event arg 0 ) { this . updated ( ) ; } @ override public void insert update ( document event arg 0 ) { this . updated ( ) ; } @ override public void changed update ( document event arg 0 ) { this . updated ( ) ; } private void updated ( ) { if ( text field . get text ( ) . length ( ) == 0 ) placeholder label . set visible ( true ) ; else placeholder label . set visible ( false ) ; } } ) ; j option pane pane = new j option pane ( panel , j option pane . question message , j option pane . ok cancel option , null , null , null ) ; pane . set initial value ( null ) ; pane . set component orientation ( j option pane . get root frame ( ) . get component orientation ( ) ) ; border border = text field . get border ( ) ; placeholder label . set border ( new empty border ( border . get border insets ( text field ) ) ) ; j dialog dialog = pane . create dialog ( null , title ) ; pane . select initial value ( ) ; dialog . add window focus listener ( new window focus listener ( ) { @ override public void window lost focus ( window event arg 0 ) { } @ override public void window gained focus ( window event arg 0 ) { text field . request focus in window ( ) ; } } ) ; dialog . set visible ( true ) ; dialog . dispose ( ) ; object selected value = pane . get value ( ) ; if ( selected value != null && ( selected value instanceof integer ) && ( ( integer ) selected value ) . int value ( ) == j option pane . ok option ) { listener . input ( text field . get text ( ) ) ; } else { listener . canceled ( ) ; } } } ) ; } <SENTENCE_END/>

(Copy Probability: 5.1%)

<SENTENCE_START> { swing utilities . invoke later ( new runnable ( ) { @ override public void run ( ) { j panel panel = new j panel ( new flow layout ( ) ) ; j panel text panel = new j panel ( ) { public boolean is optimized drawing enabled ( ) { return false ; } } ; text panel . set layout ( new overlay layout ( text panel ) ) ; panel . add ( text panel ) ; final j text field text field = new j text field ( 20 ) ; text field . set text ( text ) ; text field . set alignment x ( 0.0f ) ; text panel . add ( text field ) ; final j label placeholder label = new j label ( hint ) ; placeholder label . set foreground ( color . gray ) ; placeholder label . set alignment x ( 0.0f ) ; text panel . add ( placeholder label , 0 ) ; text field . get document ( ) . add document listener ( new document listener ( ) { @ override public void remove update ( document event arg 0 ) { this . updated ( ) ; } @ override public void insert update ( document event arg 0 ) { this . updated ( ) ; } @ override public void changed update ( document event arg 0 ) { this . updated ( ) ; } private void updated ( ) { if ( text field . get text ( ) . length ( ) == 0 ) placeholder label . set visible ( true ) ; else placeholder label . set visible ( false ) ; } } ) ; j option pane pane = new j option pane ( panel , j option pane . question message , j option pane . ok cancel option , null , null , null ) ; pane . set initial value ( null ) ; pane . set component orientation ( j option pane . get root frame ( ) . get component orientation ( ) ) ; border border = text field . get border ( ) ; placeholder label . set border ( new empty border ( border . get border insets ( text field ) ) ) ; j dialog dialog = pane . create dialog ( null , title ) ; pane . select initial value ( ) ; dialog . add window focus listener ( new window focus listener ( ) { @ override public void window lost focus ( window event arg 0 ) { } @ override public void window gained focus ( window event arg 0 ) { text field . request focus in window ( ) ; } } ) ; dialog . set visible ( true ) ; dialog . dispose ( ) ; object selected value = pane . get value ( ) ; if ( selected value != null && ( selected value instanceof integer ) && ( ( integer ) selected value ) . int value ( ) == j option pane . ok option ) { listener . input ( text field . get text ( ) ) ; } else { listener . canceled ( ) ; } } } ) ; } <SENTENCE_END/>
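Note: the get,text,input body above builds a Swing text-entry dialog on the event dispatch thread: a JTextField is stacked under a gray hint JLabel via OverlayLayout, the pair is shown in a JOptionPane OK/Cancel dialog, and the result is reported through the listener's input or canceled callback. A condensed sketch follows; the document listener that hides the hint while typing and the window-focus handling are omitted for brevity, and the listener type is assumed to expose input(String) and canceled() as the calls in the tokens indicate.

    // Condensed sketch of the dialog construction above; javax.swing imports omitted.
    public void getTextInput (final TextInputListener listener, final String title,
                              final String text, final String hint) {
        SwingUtilities.invokeLater(new Runnable() {
            public void run () {
                JPanel panel = new JPanel(new FlowLayout());
                JPanel textPanel = new JPanel();
                textPanel.setLayout(new OverlayLayout(textPanel));  // stack hint label over the field
                panel.add(textPanel);

                JTextField textField = new JTextField(20);
                textField.setText(text);
                textPanel.add(textField);

                JLabel placeholderLabel = new JLabel(hint);
                placeholderLabel.setForeground(Color.GRAY);
                textPanel.add(placeholderLabel, 0);

                JOptionPane pane = new JOptionPane(panel, JOptionPane.QUESTION_MESSAGE,
                    JOptionPane.OK_CANCEL_OPTION, null, null, null);
                JDialog dialog = pane.createDialog(null, title);
                dialog.setVisible(true);
                dialog.dispose();

                Object selectedValue = pane.getValue();
                if (selectedValue instanceof Integer
                    && ((Integer)selectedValue).intValue() == JOptionPane.OK_OPTION) {
                    listener.input(textField.getText());
                } else {
                    listener.canceled();
                }
            }
        });
    }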


Original Name is,optimized,drawing,enabled

is

<SENTENCE_START> { return false ; } <SENTENCE_END/>

(Copy Probability: 0.2%)

<SENTENCE_START> { return false ; } <SENTENCE_END/>

optimized

<SENTENCE_START> { return false ; } <SENTENCE_END/>

(Copy Probability: 0.3%)

<SENTENCE_START> { return false ; } <SENTENCE_END/>

drawing

<SENTENCE_START> { return false ; } <SENTENCE_END/>

(Copy Probability: 0.1%)

<SENTENCE_START> { return false ; } <SENTENCE_END/>

enabled

<SENTENCE_START> { return false ; } <SENTENCE_END/>

(Copy Probability: 1.4%)

<SENTENCE_START> { return false ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return false ; } <SENTENCE_END/>

(Copy Probability: 2.3%)

<SENTENCE_START> { return false ; } <SENTENCE_END/>


Original Name updated

updated

<SENTENCE_START> { if ( text field . get text ( ) . length ( ) == 0 ) placeholder label . set visible ( true ) ; else placeholder label . set visible ( false ) ; } <SENTENCE_END/>

(Copy Probability: 2.1%)

<SENTENCE_START> { if ( text field . get text ( ) . length ( ) == 0 ) placeholder label . set visible ( true ) ; else placeholder label . set visible ( false ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( text field . get text ( ) . length ( ) == 0 ) placeholder label . set visible ( true ) ; else placeholder label . set visible ( false ) ; } <SENTENCE_END/>

(Copy Probability: 12.2%)

<SENTENCE_START> { if ( text field . get text ( ) . length ( ) == 0 ) placeholder label . set visible ( true ) ; else placeholder label . set visible ( false ) ; } <SENTENCE_END/>


Original Name process,events

process

<SENTENCE_START> { synchronized ( this ) { just touched = false ; if ( key just pressed ) { key just pressed = false ; for ( int i = 0 ; i < just pressed keys . length ; i ++ ) { just pressed keys [ i ] = false ; } } if ( processor != null ) { input processor processor = this . processor ; int len = key events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { key event e = key events . get ( i ) ; current event time stamp = e . time stamp ; switch ( e . type ) { case key event . key down : processor . key down ( e . key code ) ; key just pressed = true ; just pressed keys [ e . key code ] = true ; break ; case key event . key up : processor . key up ( e . key code ) ; break ; case key event . key typed : processor . key typed ( e . key char ) ; } used key events . free ( e ) ; } len = touch events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { touch event e = touch events . get ( i ) ; current event time stamp = e . time stamp ; switch ( e . type ) { case touch event . touch down : processor . touch down ( e . x , e . y , e . pointer , e . button ) ; just touched = true ; break ; case touch event . touch up : processor . touch up ( e . x , e . y , e . pointer , e . button ) ; break ; case touch event . touch dragged : processor . touch dragged ( e . x , e . y , e . pointer ) ; break ; case touch event . touch moved : processor . mouse moved ( e . x , e . y ) ; break ; case touch event . touch scrolled : processor . scrolled ( e . scroll amount ) ; break ; } used touch events . free ( e ) ; } } else { int len = touch events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { touch event event = touch events . get ( i ) ; if ( event . type == touch event . touch down ) just touched = true ; used touch events . free ( event ) ; } len = key events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { used key events . free ( key events . get ( i ) ) ; } } if ( touch events . size ( ) == 0 ) { delta x = 0 ; delta y = 0 ; } key events . clear ( ) ; touch events . clear ( ) ; } } <SENTENCE_END/>

(Copy Probability: 4.1%)

<SENTENCE_START> { synchronized ( this ) { just touched = false ; if ( key just pressed ) { key just pressed = false ; for ( int i = 0 ; i < just pressed keys . length ; i ++ ) { just pressed keys [ i ] = false ; } } if ( processor != null ) { input processor processor = this . processor ; int len = key events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { key event e = key events . get ( i ) ; current event time stamp = e . time stamp ; switch ( e . type ) { case key event . key down : processor . key down ( e . key code ) ; key just pressed = true ; just pressed keys [ e . key code ] = true ; break ; case key event . key up : processor . key up ( e . key code ) ; break ; case key event . key typed : processor . key typed ( e . key char ) ; } used key events . free ( e ) ; } len = touch events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { touch event e = touch events . get ( i ) ; current event time stamp = e . time stamp ; switch ( e . type ) { case touch event . touch down : processor . touch down ( e . x , e . y , e . pointer , e . button ) ; just touched = true ; break ; case touch event . touch up : processor . touch up ( e . x , e . y , e . pointer , e . button ) ; break ; case touch event . touch dragged : processor . touch dragged ( e . x , e . y , e . pointer ) ; break ; case touch event . touch moved : processor . mouse moved ( e . x , e . y ) ; break ; case touch event . touch scrolled : processor . scrolled ( e . scroll amount ) ; break ; } used touch events . free ( e ) ; } } else { int len = touch events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { touch event event = touch events . get ( i ) ; if ( event . type == touch event . touch down ) just touched = true ; used touch events . free ( event ) ; } len = key events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { used key events . free ( key events . get ( i ) ) ; } } if ( touch events . size ( ) == 0 ) { delta x = 0 ; delta y = 0 ; } key events . clear ( ) ; touch events . clear ( ) ; } } <SENTENCE_END/>

events

<SENTENCE_START> { synchronized ( this ) { just touched = false ; if ( key just pressed ) { key just pressed = false ; for ( int i = 0 ; i < just pressed keys . length ; i ++ ) { just pressed keys [ i ] = false ; } } if ( processor != null ) { input processor processor = this . processor ; int len = key events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { key event e = key events . get ( i ) ; current event time stamp = e . time stamp ; switch ( e . type ) { case key event . key down : processor . key down ( e . key code ) ; key just pressed = true ; just pressed keys [ e . key code ] = true ; break ; case key event . key up : processor . key up ( e . key code ) ; break ; case key event . key typed : processor . key typed ( e . key char ) ; } used key events . free ( e ) ; } len = touch events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { touch event e = touch events . get ( i ) ; current event time stamp = e . time stamp ; switch ( e . type ) { case touch event . touch down : processor . touch down ( e . x , e . y , e . pointer , e . button ) ; just touched = true ; break ; case touch event . touch up : processor . touch up ( e . x , e . y , e . pointer , e . button ) ; break ; case touch event . touch dragged : processor . touch dragged ( e . x , e . y , e . pointer ) ; break ; case touch event . touch moved : processor . mouse moved ( e . x , e . y ) ; break ; case touch event . touch scrolled : processor . scrolled ( e . scroll amount ) ; break ; } used touch events . free ( e ) ; } } else { int len = touch events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { touch event event = touch events . get ( i ) ; if ( event . type == touch event . touch down ) just touched = true ; used touch events . free ( event ) ; } len = key events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { used key events . free ( key events . get ( i ) ) ; } } if ( touch events . size ( ) == 0 ) { delta x = 0 ; delta y = 0 ; } key events . clear ( ) ; touch events . clear ( ) ; } } <SENTENCE_END/>

(Copy Probability: 14.7%)

<SENTENCE_START> { synchronized ( this ) { just touched = false ; if ( key just pressed ) { key just pressed = false ; for ( int i = 0 ; i < just pressed keys . length ; i ++ ) { just pressed keys [ i ] = false ; } } if ( processor != null ) { input processor processor = this . processor ; int len = key events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { key event e = key events . get ( i ) ; current event time stamp = e . time stamp ; switch ( e . type ) { case key event . key down : processor . key down ( e . key code ) ; key just pressed = true ; just pressed keys [ e . key code ] = true ; break ; case key event . key up : processor . key up ( e . key code ) ; break ; case key event . key typed : processor . key typed ( e . key char ) ; } used key events . free ( e ) ; } len = touch events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { touch event e = touch events . get ( i ) ; current event time stamp = e . time stamp ; switch ( e . type ) { case touch event . touch down : processor . touch down ( e . x , e . y , e . pointer , e . button ) ; just touched = true ; break ; case touch event . touch up : processor . touch up ( e . x , e . y , e . pointer , e . button ) ; break ; case touch event . touch dragged : processor . touch dragged ( e . x , e . y , e . pointer ) ; break ; case touch event . touch moved : processor . mouse moved ( e . x , e . y ) ; break ; case touch event . touch scrolled : processor . scrolled ( e . scroll amount ) ; break ; } used touch events . free ( e ) ; } } else { int len = touch events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { touch event event = touch events . get ( i ) ; if ( event . type == touch event . touch down ) just touched = true ; used touch events . free ( event ) ; } len = key events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { used key events . free ( key events . get ( i ) ) ; } } if ( touch events . size ( ) == 0 ) { delta x = 0 ; delta y = 0 ; } key events . clear ( ) ; touch events . clear ( ) ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { synchronized ( this ) { just touched = false ; if ( key just pressed ) { key just pressed = false ; for ( int i = 0 ; i < just pressed keys . length ; i ++ ) { just pressed keys [ i ] = false ; } } if ( processor != null ) { input processor processor = this . processor ; int len = key events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { key event e = key events . get ( i ) ; current event time stamp = e . time stamp ; switch ( e . type ) { case key event . key down : processor . key down ( e . key code ) ; key just pressed = true ; just pressed keys [ e . key code ] = true ; break ; case key event . key up : processor . key up ( e . key code ) ; break ; case key event . key typed : processor . key typed ( e . key char ) ; } used key events . free ( e ) ; } len = touch events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { touch event e = touch events . get ( i ) ; current event time stamp = e . time stamp ; switch ( e . type ) { case touch event . touch down : processor . touch down ( e . x , e . y , e . pointer , e . button ) ; just touched = true ; break ; case touch event . touch up : processor . touch up ( e . x , e . y , e . pointer , e . button ) ; break ; case touch event . touch dragged : processor . touch dragged ( e . x , e . y , e . pointer ) ; break ; case touch event . touch moved : processor . mouse moved ( e . x , e . y ) ; break ; case touch event . touch scrolled : processor . scrolled ( e . scroll amount ) ; break ; } used touch events . free ( e ) ; } } else { int len = touch events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { touch event event = touch events . get ( i ) ; if ( event . type == touch event . touch down ) just touched = true ; used touch events . free ( event ) ; } len = key events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { used key events . free ( key events . get ( i ) ) ; } } if ( touch events . size ( ) == 0 ) { delta x = 0 ; delta y = 0 ; } key events . clear ( ) ; touch events . clear ( ) ; } } <SENTENCE_END/>

(Copy Probability: 15.2%)

<SENTENCE_START> { synchronized ( this ) { just touched = false ; if ( key just pressed ) { key just pressed = false ; for ( int i = 0 ; i < just pressed keys . length ; i ++ ) { just pressed keys [ i ] = false ; } } if ( processor != null ) { input processor processor = this . processor ; int len = key events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { key event e = key events . get ( i ) ; current event time stamp = e . time stamp ; switch ( e . type ) { case key event . key down : processor . key down ( e . key code ) ; key just pressed = true ; just pressed keys [ e . key code ] = true ; break ; case key event . key up : processor . key up ( e . key code ) ; break ; case key event . key typed : processor . key typed ( e . key char ) ; } used key events . free ( e ) ; } len = touch events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { touch event e = touch events . get ( i ) ; current event time stamp = e . time stamp ; switch ( e . type ) { case touch event . touch down : processor . touch down ( e . x , e . y , e . pointer , e . button ) ; just touched = true ; break ; case touch event . touch up : processor . touch up ( e . x , e . y , e . pointer , e . button ) ; break ; case touch event . touch dragged : processor . touch dragged ( e . x , e . y , e . pointer ) ; break ; case touch event . touch moved : processor . mouse moved ( e . x , e . y ) ; break ; case touch event . touch scrolled : processor . scrolled ( e . scroll amount ) ; break ; } used touch events . free ( e ) ; } } else { int len = touch events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { touch event event = touch events . get ( i ) ; if ( event . type == touch event . touch down ) just touched = true ; used touch events . free ( event ) ; } len = key events . size ( ) ; for ( int i = 0 ; i < len ; i ++ ) { used key events . free ( key events . get ( i ) ) ; } } if ( touch events . size ( ) == 0 ) { delta x = 0 ; delta y = 0 ; } key events . clear ( ) ; touch events . clear ( ) ; } } <SENTENCE_END/>
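
An abbreviated, detokenized sketch of the key-event half of the body above, assuming libGDX's com.badlogic.gdx.InputProcessor interface: queued key events are drained under a lock, forwarded to the current processor, and the queue is cleared. The touch-event loop, the pooled-event reuse and the just-pressed bookkeeping in the original follow the same shape and are omitted here; the KeyEvent class below is a minimal stand-in for the original's pooled event type.

import java.util.ArrayList;
import java.util.List;
import com.badlogic.gdx.InputProcessor;

class ProcessEventsSketch {
    // Minimal stand-in for the pooled key-event type used by the original class.
    static class KeyEvent {
        static final int KEY_DOWN = 0, KEY_UP = 1, KEY_TYPED = 2;
        int type;
        int keyCode;
        char keyChar;
    }

    final Object lock = new Object();
    final List<KeyEvent> keyEvents = new ArrayList<>();
    final boolean[] justPressedKeys = new boolean[256];
    InputProcessor processor;

    void processKeyEvents() {
        synchronized (lock) {
            if (processor != null) {
                for (int i = 0, len = keyEvents.size(); i < len; i++) {
                    KeyEvent e = keyEvents.get(i);
                    switch (e.type) {
                    case KeyEvent.KEY_DOWN:
                        processor.keyDown(e.keyCode);
                        justPressedKeys[e.keyCode] = true;
                        break;
                    case KeyEvent.KEY_UP:
                        processor.keyUp(e.keyCode);
                        break;
                    case KeyEvent.KEY_TYPED:
                        processor.keyTyped(e.keyChar);
                        break;
                    }
                }
            }
            keyEvents.clear();
        }
    }
}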


Original Name check,catched

check

<SENTENCE_START> { if ( catched && robot != null && canvas . is showing ( ) ) { int x = math . max ( 0 , math . min ( e . get x ( ) , canvas . get width ( ) ) - 1 ) + canvas . get location on screen ( ) . x ; int y = math . max ( 0 , math . min ( e . get y ( ) , canvas . get height ( ) ) - 1 ) + canvas . get location on screen ( ) . y ; if ( e . get x ( ) < 0 || e . get x ( ) >= canvas . get width ( ) || e . get y ( ) < 0 || e . get y ( ) >= canvas . get height ( ) ) { robot . mouse move ( x , y ) ; } } } <SENTENCE_END/>

(Copy Probability: 3.5%)

<SENTENCE_START> { if ( catched && robot != null && canvas . is showing ( ) ) { int x = math . max ( 0 , math . min ( e . get x ( ) , canvas . get width ( ) ) - 1 ) + canvas . get location on screen ( ) . x ; int y = math . max ( 0 , math . min ( e . get y ( ) , canvas . get height ( ) ) - 1 ) + canvas . get location on screen ( ) . y ; if ( e . get x ( ) < 0 || e . get x ( ) >= canvas . get width ( ) || e . get y ( ) < 0 || e . get y ( ) >= canvas . get height ( ) ) { robot . mouse move ( x , y ) ; } } } <SENTENCE_END/>

catched

<SENTENCE_START> { if ( catched && robot != null && canvas . is showing ( ) ) { int x = math . max ( 0 , math . min ( e . get x ( ) , canvas . get width ( ) ) - 1 ) + canvas . get location on screen ( ) . x ; int y = math . max ( 0 , math . min ( e . get y ( ) , canvas . get height ( ) ) - 1 ) + canvas . get location on screen ( ) . y ; if ( e . get x ( ) < 0 || e . get x ( ) >= canvas . get width ( ) || e . get y ( ) < 0 || e . get y ( ) >= canvas . get height ( ) ) { robot . mouse move ( x , y ) ; } } } <SENTENCE_END/>

(Copy Probability: 18.2%)

<SENTENCE_START> { if ( catched && robot != null && canvas . is showing ( ) ) { int x = math . max ( 0 , math . min ( e . get x ( ) , canvas . get width ( ) ) - 1 ) + canvas . get location on screen ( ) . x ; int y = math . max ( 0 , math . min ( e . get y ( ) , canvas . get height ( ) ) - 1 ) + canvas . get location on screen ( ) . y ; if ( e . get x ( ) < 0 || e . get x ( ) >= canvas . get width ( ) || e . get y ( ) < 0 || e . get y ( ) >= canvas . get height ( ) ) { robot . mouse move ( x , y ) ; } } } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( catched && robot != null && canvas . is showing ( ) ) { int x = math . max ( 0 , math . min ( e . get x ( ) , canvas . get width ( ) ) - 1 ) + canvas . get location on screen ( ) . x ; int y = math . max ( 0 , math . min ( e . get y ( ) , canvas . get height ( ) ) - 1 ) + canvas . get location on screen ( ) . y ; if ( e . get x ( ) < 0 || e . get x ( ) >= canvas . get width ( ) || e . get y ( ) < 0 || e . get y ( ) >= canvas . get height ( ) ) { robot . mouse move ( x , y ) ; } } } <SENTENCE_END/>

(Copy Probability: 21.1%)

<SENTENCE_START> { if ( catched && robot != null && canvas . is showing ( ) ) { int x = math . max ( 0 , math . min ( e . get x ( ) , canvas . get width ( ) ) - 1 ) + canvas . get location on screen ( ) . x ; int y = math . max ( 0 , math . min ( e . get y ( ) , canvas . get height ( ) ) - 1 ) + canvas . get location on screen ( ) . y ; if ( e . get x ( ) < 0 || e . get x ( ) >= canvas . get width ( ) || e . get y ( ) < 0 || e . get y ( ) >= canvas . get height ( ) ) { robot . mouse move ( x , y ) ; } } } <SENTENCE_END/>
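
A detokenized rendering of the body above: while the mouse is "catched" and the event falls outside the canvas, a java.awt.Robot warps the pointer back to the nearest point inside it. In the original, catched, robot and canvas are instance fields; they are parameters here only so the sketch compiles on its own.

import java.awt.Canvas;
import java.awt.Robot;
import java.awt.event.MouseEvent;

class CheckCatchedSketch {
    static void checkCatched(boolean catched, Robot robot, Canvas canvas, MouseEvent e) {
        if (catched && robot != null && canvas.isShowing()) {
            // Clamp the event position to the canvas bounds, in screen coordinates.
            int x = Math.max(0, Math.min(e.getX(), canvas.getWidth()) - 1) + canvas.getLocationOnScreen().x;
            int y = Math.max(0, Math.min(e.getY(), canvas.getHeight()) - 1) + canvas.getLocationOnScreen().y;
            if (e.getX() < 0 || e.getX() >= canvas.getWidth()
                    || e.getY() < 0 || e.getY() >= canvas.getHeight()) {
                robot.mouseMove(x, y);
            }
        }
    }
}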


Original Name to,gdx,button

to

<SENTENCE_START> { if ( swing button == mouse event . button 1 ) return buttons . left ; if ( swing button == mouse event . button 2 ) return buttons . middle ; if ( swing button == mouse event . button 3 ) return buttons . right ; return buttons . left ; } <SENTENCE_END/>

(Copy Probability: 4.4%)

<SENTENCE_START> { if ( swing button == mouse event . button 1 ) return buttons . left ; if ( swing button == mouse event . button 2 ) return buttons . middle ; if ( swing button == mouse event . button 3 ) return buttons . right ; return buttons . left ; } <SENTENCE_END/>

gdx

<SENTENCE_START> { if ( swing button == mouse event . button 1 ) return buttons . left ; if ( swing button == mouse event . button 2 ) return buttons . middle ; if ( swing button == mouse event . button 3 ) return buttons . right ; return buttons . left ; } <SENTENCE_END/>

(Copy Probability: 82.7%)

<SENTENCE_START> { if ( swing button == mouse event . button 1 ) return buttons . left ; if ( swing button == mouse event . button 2 ) return buttons . middle ; if ( swing button == mouse event . button 3 ) return buttons . right ; return buttons . left ; } <SENTENCE_END/>

button

<SENTENCE_START> { if ( swing button == mouse event . button 1 ) return buttons . left ; if ( swing button == mouse event . button 2 ) return buttons . middle ; if ( swing button == mouse event . button 3 ) return buttons . right ; return buttons . left ; } <SENTENCE_END/>

(Copy Probability: 56.5%)

<SENTENCE_START> { if ( swing button == mouse event . button 1 ) return buttons . left ; if ( swing button == mouse event . button 2 ) return buttons . middle ; if ( swing button == mouse event . button 3 ) return buttons . right ; return buttons . left ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( swing button == mouse event . button 1 ) return buttons . left ; if ( swing button == mouse event . button 2 ) return buttons . middle ; if ( swing button == mouse event . button 3 ) return buttons . right ; return buttons . left ; } <SENTENCE_END/>

(Copy Probability: 5.1%)

<SENTENCE_START> { if ( swing button == mouse event . button 1 ) return buttons . left ; if ( swing button == mouse event . button 2 ) return buttons . middle ; if ( swing button == mouse event . button 3 ) return buttons . right ; return buttons . left ; } <SENTENCE_END/>
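
A detokenized rendering of the body above, assuming libGDX's Input.Buttons constants: AWT/Swing mouse-button ids map to LEFT, MIDDLE and RIGHT, with LEFT as the fallback for anything else.

import java.awt.event.MouseEvent;
import com.badlogic.gdx.Input.Buttons;

class ToGdxButtonSketch {
    // Translate an AWT/Swing mouse-button id into the corresponding libGDX button constant.
    static int toGdxButton(int swingButton) {
        if (swingButton == MouseEvent.BUTTON1) return Buttons.LEFT;
        if (swingButton == MouseEvent.BUTTON2) return Buttons.MIDDLE;
        if (swingButton == MouseEvent.BUTTON3) return Buttons.RIGHT;
        return Buttons.LEFT;
    }
}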


Original Name translate,key,code

translate

<SENTENCE_START> { switch ( key code ) { case java . awt . event . key event . vk add : return input . keys . plus ; case java . awt . event . key event . vk subtract : return input . keys . minus ; case java . awt . event . key event . vk 0 : return input . keys . num 0 ; case java . awt . event . key event . vk 1 : return input . keys . num 1 ; case java . awt . event . key event . vk 2 : return input . keys . num 2 ; case java . awt . event . key event . vk 3 : return input . keys . num 3 ; case java . awt . event . key event . vk 4 : return input . keys . num 4 ; case java . awt . event . key event . vk 5 : return input . keys . num 5 ; case java . awt . event . key event . vk 6 : return input . keys . num 6 ; case java . awt . event . key event . vk 7 : return input . keys . num 7 ; case java . awt . event . key event . vk 8 : return input . keys . num 8 ; case java . awt . event . key event . vk 9 : return input . keys . num 9 ; case java . awt . event . key event . vk a : return input . keys . a ; case java . awt . event . key event . vk b : return input . keys . b ; case java . awt . event . key event . vk c : return input . keys . c ; case java . awt . event . key event . vk d : return input . keys . d ; case java . awt . event . key event . vk e : return input . keys . e ; case java . awt . event . key event . vk f : return input . keys . f ; case java . awt . event . key event . vk g : return input . keys . g ; case java . awt . event . key event . vk h : return input . keys . h ; case java . awt . event . key event . vk i : return input . keys . i ; case java . awt . event . key event . vk j : return input . keys . j ; case java . awt . event . key event . vk k : return input . keys . k ; case java . awt . event . key event . vk l : return input . keys . l ; case java . awt . event . key event . vk m : return input . keys . m ; case java . awt . event . key event . vk n : return input . keys . n ; case java . awt . event . key event . vk o : return input . keys . o ; case java . awt . event . key event . vk p : return input . keys . p ; case java . awt . event . key event . vk q : return input . keys . q ; case java . awt . event . key event . vk r : return input . keys . r ; case java . awt . event . key event . vk s : return input . keys . s ; case java . awt . event . key event . vk t : return input . keys . t ; case java . awt . event . key event . vk u : return input . keys . u ; case java . awt . event . key event . vk v : return input . keys . v ; case java . awt . event . key event . vk w : return input . keys . w ; case java . awt . event . key event . vk x : return input . keys . x ; case java . awt . event . key event . vk y : return input . keys . y ; case java . awt . event . key event . vk z : return input . keys . z ; case java . awt . event . key event . vk alt : return input . keys . alt left ; case java . awt . event . key event . vk alt graph : return input . keys . alt right ; case java . awt . event . key event . vk back slash : return input . keys . backslash ; case java . awt . event . key event . vk comma : return input . keys . comma ; case java . awt . event . key event . vk delete : return input . keys . forward del ; case java . awt . event . key event . vk left : return input . keys . dpad left ; case java . awt . event . key event . vk right : return input . keys . dpad right ; case java . awt . event . key event . vk up : return input . keys . dpad up ; case java . awt . event . key event . vk down : return input . keys . dpad down ; case java . 
awt . event . key event . vk enter : return input . keys . enter ; case java . awt . event . key event . vk home : return input . keys . home ; case java . awt . event . key event . vk minus : return input . keys . minus ; case java . awt . event . key event . vk period : return input . keys . period ; case java . awt . event . key event . vk plus : return input . keys . plus ; case java . awt . event . key event . vk semicolon : return input . keys . semicolon ; case java . awt . event . key event . vk shift : return input . keys . shift left ; case java . awt . event . key event . vk slash : return input . keys . slash ; case java . awt . event . key event . vk space : return input . keys . space ; case java . awt . event . key event . vk tab : return input . keys . tab ; case java . awt . event . key event . vk back space : return input . keys . del ; case java . awt . event . key event . vk control : return input . keys . control left ; case java . awt . event . key event . vk escape : return input . keys . escape ; case java . awt . event . key event . vk end : return input . keys . end ; case java . awt . event . key event . vk insert : return input . keys . insert ; case java . awt . event . key event . vk page up : return input . keys . page up ; case java . awt . event . key event . vk page down : return input . keys . page down ; case java . awt . event . key event . vk f 1 : return input . keys . f 1 ; case java . awt . event . key event . vk f 2 : return input . keys . f 2 ; case java . awt . event . key event . vk f 3 : return input . keys . f 3 ; case java . awt . event . key event . vk f 4 : return input . keys . f 4 ; case java . awt . event . key event . vk f 5 : return input . keys . f 5 ; case java . awt . event . key event . vk f 6 : return input . keys . f 6 ; case java . awt . event . key event . vk f 7 : return input . keys . f 7 ; case java . awt . event . key event . vk f 8 : return input . keys . f 8 ; case java . awt . event . key event . vk f 9 : return input . keys . f 9 ; case java . awt . event . key event . vk f 10 : return input . keys . f 10 ; case java . awt . event . key event . vk f 11 : return input . keys . f 11 ; case java . awt . event . key event . vk f 12 : return input . keys . f 12 ; case java . awt . event . key event . vk colon : return input . keys . colon ; case java . awt . event . key event . vk numpad 0 : return input . keys . num 0 ; case java . awt . event . key event . vk numpad 1 : return input . keys . num 1 ; case java . awt . event . key event . vk numpad 2 : return input . keys . num 2 ; case java . awt . event . key event . vk numpad 3 : return input . keys . num 3 ; case java . awt . event . key event . vk numpad 4 : return input . keys . num 4 ; case java . awt . event . key event . vk numpad 5 : return input . keys . num 5 ; case java . awt . event . key event . vk numpad 6 : return input . keys . num 6 ; case java . awt . event . key event . vk numpad 7 : return input . keys . num 7 ; case java . awt . event . key event . vk numpad 8 : return input . keys . num 8 ; case java . awt . event . key event . vk numpad 9 : return input . keys . num 9 ; } return input . keys . unknown ; } <SENTENCE_END/>

(Copy Probability: 3.5%)

<SENTENCE_START> { switch ( key code ) { case java . awt . event . key event . vk add : return input . keys . plus ; case java . awt . event . key event . vk subtract : return input . keys . minus ; case java . awt . event . key event . vk 0 : return input . keys . num 0 ; case java . awt . event . key event . vk 1 : return input . keys . num 1 ; case java . awt . event . key event . vk 2 : return input . keys . num 2 ; case java . awt . event . key event . vk 3 : return input . keys . num 3 ; case java . awt . event . key event . vk 4 : return input . keys . num 4 ; case java . awt . event . key event . vk 5 : return input . keys . num 5 ; case java . awt . event . key event . vk 6 : return input . keys . num 6 ; case java . awt . event . key event . vk 7 : return input . keys . num 7 ; case java . awt . event . key event . vk 8 : return input . keys . num 8 ; case java . awt . event . key event . vk 9 : return input . keys . num 9 ; case java . awt . event . key event . vk a : return input . keys . a ; case java . awt . event . key event . vk b : return input . keys . b ; case java . awt . event . key event . vk c : return input . keys . c ; case java . awt . event . key event . vk d : return input . keys . d ; case java . awt . event . key event . vk e : return input . keys . e ; case java . awt . event . key event . vk f : return input . keys . f ; case java . awt . event . key event . vk g : return input . keys . g ; case java . awt . event . key event . vk h : return input . keys . h ; case java . awt . event . key event . vk i : return input . keys . i ; case java . awt . event . key event . vk j : return input . keys . j ; case java . awt . event . key event . vk k : return input . keys . k ; case java . awt . event . key event . vk l : return input . keys . l ; case java . awt . event . key event . vk m : return input . keys . m ; case java . awt . event . key event . vk n : return input . keys . n ; case java . awt . event . key event . vk o : return input . keys . o ; case java . awt . event . key event . vk p : return input . keys . p ; case java . awt . event . key event . vk q : return input . keys . q ; case java . awt . event . key event . vk r : return input . keys . r ; case java . awt . event . key event . vk s : return input . keys . s ; case java . awt . event . key event . vk t : return input . keys . t ; case java . awt . event . key event . vk u : return input . keys . u ; case java . awt . event . key event . vk v : return input . keys . v ; case java . awt . event . key event . vk w : return input . keys . w ; case java . awt . event . key event . vk x : return input . keys . x ; case java . awt . event . key event . vk y : return input . keys . y ; case java . awt . event . key event . vk z : return input . keys . z ; case java . awt . event . key event . vk alt : return input . keys . alt left ; case java . awt . event . key event . vk alt graph : return input . keys . alt right ; case java . awt . event . key event . vk back slash : return input . keys . backslash ; case java . awt . event . key event . vk comma : return input . keys . comma ; case java . awt . event . key event . vk delete : return input . keys . forward del ; case java . awt . event . key event . vk left : return input . keys . dpad left ; case java . awt . event . key event . vk right : return input . keys . dpad right ; case java . awt . event . key event . vk up : return input . keys . dpad up ; case java . awt . event . key event . vk down : return input . keys . dpad down ; case java . 
awt . event . key event . vk enter : return input . keys . enter ; case java . awt . event . key event . vk home : return input . keys . home ; case java . awt . event . key event . vk minus : return input . keys . minus ; case java . awt . event . key event . vk period : return input . keys . period ; case java . awt . event . key event . vk plus : return input . keys . plus ; case java . awt . event . key event . vk semicolon : return input . keys . semicolon ; case java . awt . event . key event . vk shift : return input . keys . shift left ; case java . awt . event . key event . vk slash : return input . keys . slash ; case java . awt . event . key event . vk space : return input . keys . space ; case java . awt . event . key event . vk tab : return input . keys . tab ; case java . awt . event . key event . vk back space : return input . keys . del ; case java . awt . event . key event . vk control : return input . keys . control left ; case java . awt . event . key event . vk escape : return input . keys . escape ; case java . awt . event . key event . vk end : return input . keys . end ; case java . awt . event . key event . vk insert : return input . keys . insert ; case java . awt . event . key event . vk page up : return input . keys . page up ; case java . awt . event . key event . vk page down : return input . keys . page down ; case java . awt . event . key event . vk f 1 : return input . keys . f 1 ; case java . awt . event . key event . vk f 2 : return input . keys . f 2 ; case java . awt . event . key event . vk f 3 : return input . keys . f 3 ; case java . awt . event . key event . vk f 4 : return input . keys . f 4 ; case java . awt . event . key event . vk f 5 : return input . keys . f 5 ; case java . awt . event . key event . vk f 6 : return input . keys . f 6 ; case java . awt . event . key event . vk f 7 : return input . keys . f 7 ; case java . awt . event . key event . vk f 8 : return input . keys . f 8 ; case java . awt . event . key event . vk f 9 : return input . keys . f 9 ; case java . awt . event . key event . vk f 10 : return input . keys . f 10 ; case java . awt . event . key event . vk f 11 : return input . keys . f 11 ; case java . awt . event . key event . vk f 12 : return input . keys . f 12 ; case java . awt . event . key event . vk colon : return input . keys . colon ; case java . awt . event . key event . vk numpad 0 : return input . keys . num 0 ; case java . awt . event . key event . vk numpad 1 : return input . keys . num 1 ; case java . awt . event . key event . vk numpad 2 : return input . keys . num 2 ; case java . awt . event . key event . vk numpad 3 : return input . keys . num 3 ; case java . awt . event . key event . vk numpad 4 : return input . keys . num 4 ; case java . awt . event . key event . vk numpad 5 : return input . keys . num 5 ; case java . awt . event . key event . vk numpad 6 : return input . keys . num 6 ; case java . awt . event . key event . vk numpad 7 : return input . keys . num 7 ; case java . awt . event . key event . vk numpad 8 : return input . keys . num 8 ; case java . awt . event . key event . vk numpad 9 : return input . keys . num 9 ; } return input . keys . unknown ; } <SENTENCE_END/>

key

<SENTENCE_START> { switch ( key code ) { case java . awt . event . key event . vk add : return input . keys . plus ; case java . awt . event . key event . vk subtract : return input . keys . minus ; case java . awt . event . key event . vk 0 : return input . keys . num 0 ; case java . awt . event . key event . vk 1 : return input . keys . num 1 ; case java . awt . event . key event . vk 2 : return input . keys . num 2 ; case java . awt . event . key event . vk 3 : return input . keys . num 3 ; case java . awt . event . key event . vk 4 : return input . keys . num 4 ; case java . awt . event . key event . vk 5 : return input . keys . num 5 ; case java . awt . event . key event . vk 6 : return input . keys . num 6 ; case java . awt . event . key event . vk 7 : return input . keys . num 7 ; case java . awt . event . key event . vk 8 : return input . keys . num 8 ; case java . awt . event . key event . vk 9 : return input . keys . num 9 ; case java . awt . event . key event . vk a : return input . keys . a ; case java . awt . event . key event . vk b : return input . keys . b ; case java . awt . event . key event . vk c : return input . keys . c ; case java . awt . event . key event . vk d : return input . keys . d ; case java . awt . event . key event . vk e : return input . keys . e ; case java . awt . event . key event . vk f : return input . keys . f ; case java . awt . event . key event . vk g : return input . keys . g ; case java . awt . event . key event . vk h : return input . keys . h ; case java . awt . event . key event . vk i : return input . keys . i ; case java . awt . event . key event . vk j : return input . keys . j ; case java . awt . event . key event . vk k : return input . keys . k ; case java . awt . event . key event . vk l : return input . keys . l ; case java . awt . event . key event . vk m : return input . keys . m ; case java . awt . event . key event . vk n : return input . keys . n ; case java . awt . event . key event . vk o : return input . keys . o ; case java . awt . event . key event . vk p : return input . keys . p ; case java . awt . event . key event . vk q : return input . keys . q ; case java . awt . event . key event . vk r : return input . keys . r ; case java . awt . event . key event . vk s : return input . keys . s ; case java . awt . event . key event . vk t : return input . keys . t ; case java . awt . event . key event . vk u : return input . keys . u ; case java . awt . event . key event . vk v : return input . keys . v ; case java . awt . event . key event . vk w : return input . keys . w ; case java . awt . event . key event . vk x : return input . keys . x ; case java . awt . event . key event . vk y : return input . keys . y ; case java . awt . event . key event . vk z : return input . keys . z ; case java . awt . event . key event . vk alt : return input . keys . alt left ; case java . awt . event . key event . vk alt graph : return input . keys . alt right ; case java . awt . event . key event . vk back slash : return input . keys . backslash ; case java . awt . event . key event . vk comma : return input . keys . comma ; case java . awt . event . key event . vk delete : return input . keys . forward del ; case java . awt . event . key event . vk left : return input . keys . dpad left ; case java . awt . event . key event . vk right : return input . keys . dpad right ; case java . awt . event . key event . vk up : return input . keys . dpad up ; case java . awt . event . key event . vk down : return input . keys . dpad down ; case java . 
awt . event . key event . vk enter : return input . keys . enter ; case java . awt . event . key event . vk home : return input . keys . home ; case java . awt . event . key event . vk minus : return input . keys . minus ; case java . awt . event . key event . vk period : return input . keys . period ; case java . awt . event . key event . vk plus : return input . keys . plus ; case java . awt . event . key event . vk semicolon : return input . keys . semicolon ; case java . awt . event . key event . vk shift : return input . keys . shift left ; case java . awt . event . key event . vk slash : return input . keys . slash ; case java . awt . event . key event . vk space : return input . keys . space ; case java . awt . event . key event . vk tab : return input . keys . tab ; case java . awt . event . key event . vk back space : return input . keys . del ; case java . awt . event . key event . vk control : return input . keys . control left ; case java . awt . event . key event . vk escape : return input . keys . escape ; case java . awt . event . key event . vk end : return input . keys . end ; case java . awt . event . key event . vk insert : return input . keys . insert ; case java . awt . event . key event . vk page up : return input . keys . page up ; case java . awt . event . key event . vk page down : return input . keys . page down ; case java . awt . event . key event . vk f 1 : return input . keys . f 1 ; case java . awt . event . key event . vk f 2 : return input . keys . f 2 ; case java . awt . event . key event . vk f 3 : return input . keys . f 3 ; case java . awt . event . key event . vk f 4 : return input . keys . f 4 ; case java . awt . event . key event . vk f 5 : return input . keys . f 5 ; case java . awt . event . key event . vk f 6 : return input . keys . f 6 ; case java . awt . event . key event . vk f 7 : return input . keys . f 7 ; case java . awt . event . key event . vk f 8 : return input . keys . f 8 ; case java . awt . event . key event . vk f 9 : return input . keys . f 9 ; case java . awt . event . key event . vk f 10 : return input . keys . f 10 ; case java . awt . event . key event . vk f 11 : return input . keys . f 11 ; case java . awt . event . key event . vk f 12 : return input . keys . f 12 ; case java . awt . event . key event . vk colon : return input . keys . colon ; case java . awt . event . key event . vk numpad 0 : return input . keys . num 0 ; case java . awt . event . key event . vk numpad 1 : return input . keys . num 1 ; case java . awt . event . key event . vk numpad 2 : return input . keys . num 2 ; case java . awt . event . key event . vk numpad 3 : return input . keys . num 3 ; case java . awt . event . key event . vk numpad 4 : return input . keys . num 4 ; case java . awt . event . key event . vk numpad 5 : return input . keys . num 5 ; case java . awt . event . key event . vk numpad 6 : return input . keys . num 6 ; case java . awt . event . key event . vk numpad 7 : return input . keys . num 7 ; case java . awt . event . key event . vk numpad 8 : return input . keys . num 8 ; case java . awt . event . key event . vk numpad 9 : return input . keys . num 9 ; } return input . keys . unknown ; } <SENTENCE_END/>

(Copy Probability: 4.5%)

<SENTENCE_START> { switch ( key code ) { case java . awt . event . key event . vk add : return input . keys . plus ; case java . awt . event . key event . vk subtract : return input . keys . minus ; case java . awt . event . key event . vk 0 : return input . keys . num 0 ; case java . awt . event . key event . vk 1 : return input . keys . num 1 ; case java . awt . event . key event . vk 2 : return input . keys . num 2 ; case java . awt . event . key event . vk 3 : return input . keys . num 3 ; case java . awt . event . key event . vk 4 : return input . keys . num 4 ; case java . awt . event . key event . vk 5 : return input . keys . num 5 ; case java . awt . event . key event . vk 6 : return input . keys . num 6 ; case java . awt . event . key event . vk 7 : return input . keys . num 7 ; case java . awt . event . key event . vk 8 : return input . keys . num 8 ; case java . awt . event . key event . vk 9 : return input . keys . num 9 ; case java . awt . event . key event . vk a : return input . keys . a ; case java . awt . event . key event . vk b : return input . keys . b ; case java . awt . event . key event . vk c : return input . keys . c ; case java . awt . event . key event . vk d : return input . keys . d ; case java . awt . event . key event . vk e : return input . keys . e ; case java . awt . event . key event . vk f : return input . keys . f ; case java . awt . event . key event . vk g : return input . keys . g ; case java . awt . event . key event . vk h : return input . keys . h ; case java . awt . event . key event . vk i : return input . keys . i ; case java . awt . event . key event . vk j : return input . keys . j ; case java . awt . event . key event . vk k : return input . keys . k ; case java . awt . event . key event . vk l : return input . keys . l ; case java . awt . event . key event . vk m : return input . keys . m ; case java . awt . event . key event . vk n : return input . keys . n ; case java . awt . event . key event . vk o : return input . keys . o ; case java . awt . event . key event . vk p : return input . keys . p ; case java . awt . event . key event . vk q : return input . keys . q ; case java . awt . event . key event . vk r : return input . keys . r ; case java . awt . event . key event . vk s : return input . keys . s ; case java . awt . event . key event . vk t : return input . keys . t ; case java . awt . event . key event . vk u : return input . keys . u ; case java . awt . event . key event . vk v : return input . keys . v ; case java . awt . event . key event . vk w : return input . keys . w ; case java . awt . event . key event . vk x : return input . keys . x ; case java . awt . event . key event . vk y : return input . keys . y ; case java . awt . event . key event . vk z : return input . keys . z ; case java . awt . event . key event . vk alt : return input . keys . alt left ; case java . awt . event . key event . vk alt graph : return input . keys . alt right ; case java . awt . event . key event . vk back slash : return input . keys . backslash ; case java . awt . event . key event . vk comma : return input . keys . comma ; case java . awt . event . key event . vk delete : return input . keys . forward del ; case java . awt . event . key event . vk left : return input . keys . dpad left ; case java . awt . event . key event . vk right : return input . keys . dpad right ; case java . awt . event . key event . vk up : return input . keys . dpad up ; case java . awt . event . key event . vk down : return input . keys . dpad down ; case java . 
awt . event . key event . vk enter : return input . keys . enter ; case java . awt . event . key event . vk home : return input . keys . home ; case java . awt . event . key event . vk minus : return input . keys . minus ; case java . awt . event . key event . vk period : return input . keys . period ; case java . awt . event . key event . vk plus : return input . keys . plus ; case java . awt . event . key event . vk semicolon : return input . keys . semicolon ; case java . awt . event . key event . vk shift : return input . keys . shift left ; case java . awt . event . key event . vk slash : return input . keys . slash ; case java . awt . event . key event . vk space : return input . keys . space ; case java . awt . event . key event . vk tab : return input . keys . tab ; case java . awt . event . key event . vk back space : return input . keys . del ; case java . awt . event . key event . vk control : return input . keys . control left ; case java . awt . event . key event . vk escape : return input . keys . escape ; case java . awt . event . key event . vk end : return input . keys . end ; case java . awt . event . key event . vk insert : return input . keys . insert ; case java . awt . event . key event . vk page up : return input . keys . page up ; case java . awt . event . key event . vk page down : return input . keys . page down ; case java . awt . event . key event . vk f 1 : return input . keys . f 1 ; case java . awt . event . key event . vk f 2 : return input . keys . f 2 ; case java . awt . event . key event . vk f 3 : return input . keys . f 3 ; case java . awt . event . key event . vk f 4 : return input . keys . f 4 ; case java . awt . event . key event . vk f 5 : return input . keys . f 5 ; case java . awt . event . key event . vk f 6 : return input . keys . f 6 ; case java . awt . event . key event . vk f 7 : return input . keys . f 7 ; case java . awt . event . key event . vk f 8 : return input . keys . f 8 ; case java . awt . event . key event . vk f 9 : return input . keys . f 9 ; case java . awt . event . key event . vk f 10 : return input . keys . f 10 ; case java . awt . event . key event . vk f 11 : return input . keys . f 11 ; case java . awt . event . key event . vk f 12 : return input . keys . f 12 ; case java . awt . event . key event . vk colon : return input . keys . colon ; case java . awt . event . key event . vk numpad 0 : return input . keys . num 0 ; case java . awt . event . key event . vk numpad 1 : return input . keys . num 1 ; case java . awt . event . key event . vk numpad 2 : return input . keys . num 2 ; case java . awt . event . key event . vk numpad 3 : return input . keys . num 3 ; case java . awt . event . key event . vk numpad 4 : return input . keys . num 4 ; case java . awt . event . key event . vk numpad 5 : return input . keys . num 5 ; case java . awt . event . key event . vk numpad 6 : return input . keys . num 6 ; case java . awt . event . key event . vk numpad 7 : return input . keys . num 7 ; case java . awt . event . key event . vk numpad 8 : return input . keys . num 8 ; case java . awt . event . key event . vk numpad 9 : return input . keys . num 9 ; } return input . keys . unknown ; } <SENTENCE_END/>

code

<SENTENCE_START> { switch ( key code ) { case java . awt . event . key event . vk add : return input . keys . plus ; case java . awt . event . key event . vk subtract : return input . keys . minus ; case java . awt . event . key event . vk 0 : return input . keys . num 0 ; case java . awt . event . key event . vk 1 : return input . keys . num 1 ; case java . awt . event . key event . vk 2 : return input . keys . num 2 ; case java . awt . event . key event . vk 3 : return input . keys . num 3 ; case java . awt . event . key event . vk 4 : return input . keys . num 4 ; case java . awt . event . key event . vk 5 : return input . keys . num 5 ; case java . awt . event . key event . vk 6 : return input . keys . num 6 ; case java . awt . event . key event . vk 7 : return input . keys . num 7 ; case java . awt . event . key event . vk 8 : return input . keys . num 8 ; case java . awt . event . key event . vk 9 : return input . keys . num 9 ; case java . awt . event . key event . vk a : return input . keys . a ; case java . awt . event . key event . vk b : return input . keys . b ; case java . awt . event . key event . vk c : return input . keys . c ; case java . awt . event . key event . vk d : return input . keys . d ; case java . awt . event . key event . vk e : return input . keys . e ; case java . awt . event . key event . vk f : return input . keys . f ; case java . awt . event . key event . vk g : return input . keys . g ; case java . awt . event . key event . vk h : return input . keys . h ; case java . awt . event . key event . vk i : return input . keys . i ; case java . awt . event . key event . vk j : return input . keys . j ; case java . awt . event . key event . vk k : return input . keys . k ; case java . awt . event . key event . vk l : return input . keys . l ; case java . awt . event . key event . vk m : return input . keys . m ; case java . awt . event . key event . vk n : return input . keys . n ; case java . awt . event . key event . vk o : return input . keys . o ; case java . awt . event . key event . vk p : return input . keys . p ; case java . awt . event . key event . vk q : return input . keys . q ; case java . awt . event . key event . vk r : return input . keys . r ; case java . awt . event . key event . vk s : return input . keys . s ; case java . awt . event . key event . vk t : return input . keys . t ; case java . awt . event . key event . vk u : return input . keys . u ; case java . awt . event . key event . vk v : return input . keys . v ; case java . awt . event . key event . vk w : return input . keys . w ; case java . awt . event . key event . vk x : return input . keys . x ; case java . awt . event . key event . vk y : return input . keys . y ; case java . awt . event . key event . vk z : return input . keys . z ; case java . awt . event . key event . vk alt : return input . keys . alt left ; case java . awt . event . key event . vk alt graph : return input . keys . alt right ; case java . awt . event . key event . vk back slash : return input . keys . backslash ; case java . awt . event . key event . vk comma : return input . keys . comma ; case java . awt . event . key event . vk delete : return input . keys . forward del ; case java . awt . event . key event . vk left : return input . keys . dpad left ; case java . awt . event . key event . vk right : return input . keys . dpad right ; case java . awt . event . key event . vk up : return input . keys . dpad up ; case java . awt . event . key event . vk down : return input . keys . dpad down ; case java . 
awt . event . key event . vk enter : return input . keys . enter ; case java . awt . event . key event . vk home : return input . keys . home ; case java . awt . event . key event . vk minus : return input . keys . minus ; case java . awt . event . key event . vk period : return input . keys . period ; case java . awt . event . key event . vk plus : return input . keys . plus ; case java . awt . event . key event . vk semicolon : return input . keys . semicolon ; case java . awt . event . key event . vk shift : return input . keys . shift left ; case java . awt . event . key event . vk slash : return input . keys . slash ; case java . awt . event . key event . vk space : return input . keys . space ; case java . awt . event . key event . vk tab : return input . keys . tab ; case java . awt . event . key event . vk back space : return input . keys . del ; case java . awt . event . key event . vk control : return input . keys . control left ; case java . awt . event . key event . vk escape : return input . keys . escape ; case java . awt . event . key event . vk end : return input . keys . end ; case java . awt . event . key event . vk insert : return input . keys . insert ; case java . awt . event . key event . vk page up : return input . keys . page up ; case java . awt . event . key event . vk page down : return input . keys . page down ; case java . awt . event . key event . vk f 1 : return input . keys . f 1 ; case java . awt . event . key event . vk f 2 : return input . keys . f 2 ; case java . awt . event . key event . vk f 3 : return input . keys . f 3 ; case java . awt . event . key event . vk f 4 : return input . keys . f 4 ; case java . awt . event . key event . vk f 5 : return input . keys . f 5 ; case java . awt . event . key event . vk f 6 : return input . keys . f 6 ; case java . awt . event . key event . vk f 7 : return input . keys . f 7 ; case java . awt . event . key event . vk f 8 : return input . keys . f 8 ; case java . awt . event . key event . vk f 9 : return input . keys . f 9 ; case java . awt . event . key event . vk f 10 : return input . keys . f 10 ; case java . awt . event . key event . vk f 11 : return input . keys . f 11 ; case java . awt . event . key event . vk f 12 : return input . keys . f 12 ; case java . awt . event . key event . vk colon : return input . keys . colon ; case java . awt . event . key event . vk numpad 0 : return input . keys . num 0 ; case java . awt . event . key event . vk numpad 1 : return input . keys . num 1 ; case java . awt . event . key event . vk numpad 2 : return input . keys . num 2 ; case java . awt . event . key event . vk numpad 3 : return input . keys . num 3 ; case java . awt . event . key event . vk numpad 4 : return input . keys . num 4 ; case java . awt . event . key event . vk numpad 5 : return input . keys . num 5 ; case java . awt . event . key event . vk numpad 6 : return input . keys . num 6 ; case java . awt . event . key event . vk numpad 7 : return input . keys . num 7 ; case java . awt . event . key event . vk numpad 8 : return input . keys . num 8 ; case java . awt . event . key event . vk numpad 9 : return input . keys . num 9 ; } return input . keys . unknown ; } <SENTENCE_END/>

(Copy Probability: 4.2%)

<SENTENCE_START> { switch ( key code ) { case java . awt . event . key event . vk add : return input . keys . plus ; case java . awt . event . key event . vk subtract : return input . keys . minus ; case java . awt . event . key event . vk 0 : return input . keys . num 0 ; case java . awt . event . key event . vk 1 : return input . keys . num 1 ; case java . awt . event . key event . vk 2 : return input . keys . num 2 ; case java . awt . event . key event . vk 3 : return input . keys . num 3 ; case java . awt . event . key event . vk 4 : return input . keys . num 4 ; case java . awt . event . key event . vk 5 : return input . keys . num 5 ; case java . awt . event . key event . vk 6 : return input . keys . num 6 ; case java . awt . event . key event . vk 7 : return input . keys . num 7 ; case java . awt . event . key event . vk 8 : return input . keys . num 8 ; case java . awt . event . key event . vk 9 : return input . keys . num 9 ; case java . awt . event . key event . vk a : return input . keys . a ; case java . awt . event . key event . vk b : return input . keys . b ; case java . awt . event . key event . vk c : return input . keys . c ; case java . awt . event . key event . vk d : return input . keys . d ; case java . awt . event . key event . vk e : return input . keys . e ; case java . awt . event . key event . vk f : return input . keys . f ; case java . awt . event . key event . vk g : return input . keys . g ; case java . awt . event . key event . vk h : return input . keys . h ; case java . awt . event . key event . vk i : return input . keys . i ; case java . awt . event . key event . vk j : return input . keys . j ; case java . awt . event . key event . vk k : return input . keys . k ; case java . awt . event . key event . vk l : return input . keys . l ; case java . awt . event . key event . vk m : return input . keys . m ; case java . awt . event . key event . vk n : return input . keys . n ; case java . awt . event . key event . vk o : return input . keys . o ; case java . awt . event . key event . vk p : return input . keys . p ; case java . awt . event . key event . vk q : return input . keys . q ; case java . awt . event . key event . vk r : return input . keys . r ; case java . awt . event . key event . vk s : return input . keys . s ; case java . awt . event . key event . vk t : return input . keys . t ; case java . awt . event . key event . vk u : return input . keys . u ; case java . awt . event . key event . vk v : return input . keys . v ; case java . awt . event . key event . vk w : return input . keys . w ; case java . awt . event . key event . vk x : return input . keys . x ; case java . awt . event . key event . vk y : return input . keys . y ; case java . awt . event . key event . vk z : return input . keys . z ; case java . awt . event . key event . vk alt : return input . keys . alt left ; case java . awt . event . key event . vk alt graph : return input . keys . alt right ; case java . awt . event . key event . vk back slash : return input . keys . backslash ; case java . awt . event . key event . vk comma : return input . keys . comma ; case java . awt . event . key event . vk delete : return input . keys . forward del ; case java . awt . event . key event . vk left : return input . keys . dpad left ; case java . awt . event . key event . vk right : return input . keys . dpad right ; case java . awt . event . key event . vk up : return input . keys . dpad up ; case java . awt . event . key event . vk down : return input . keys . dpad down ; case java . 
awt . event . key event . vk enter : return input . keys . enter ; case java . awt . event . key event . vk home : return input . keys . home ; case java . awt . event . key event . vk minus : return input . keys . minus ; case java . awt . event . key event . vk period : return input . keys . period ; case java . awt . event . key event . vk plus : return input . keys . plus ; case java . awt . event . key event . vk semicolon : return input . keys . semicolon ; case java . awt . event . key event . vk shift : return input . keys . shift left ; case java . awt . event . key event . vk slash : return input . keys . slash ; case java . awt . event . key event . vk space : return input . keys . space ; case java . awt . event . key event . vk tab : return input . keys . tab ; case java . awt . event . key event . vk back space : return input . keys . del ; case java . awt . event . key event . vk control : return input . keys . control left ; case java . awt . event . key event . vk escape : return input . keys . escape ; case java . awt . event . key event . vk end : return input . keys . end ; case java . awt . event . key event . vk insert : return input . keys . insert ; case java . awt . event . key event . vk page up : return input . keys . page up ; case java . awt . event . key event . vk page down : return input . keys . page down ; case java . awt . event . key event . vk f 1 : return input . keys . f 1 ; case java . awt . event . key event . vk f 2 : return input . keys . f 2 ; case java . awt . event . key event . vk f 3 : return input . keys . f 3 ; case java . awt . event . key event . vk f 4 : return input . keys . f 4 ; case java . awt . event . key event . vk f 5 : return input . keys . f 5 ; case java . awt . event . key event . vk f 6 : return input . keys . f 6 ; case java . awt . event . key event . vk f 7 : return input . keys . f 7 ; case java . awt . event . key event . vk f 8 : return input . keys . f 8 ; case java . awt . event . key event . vk f 9 : return input . keys . f 9 ; case java . awt . event . key event . vk f 10 : return input . keys . f 10 ; case java . awt . event . key event . vk f 11 : return input . keys . f 11 ; case java . awt . event . key event . vk f 12 : return input . keys . f 12 ; case java . awt . event . key event . vk colon : return input . keys . colon ; case java . awt . event . key event . vk numpad 0 : return input . keys . num 0 ; case java . awt . event . key event . vk numpad 1 : return input . keys . num 1 ; case java . awt . event . key event . vk numpad 2 : return input . keys . num 2 ; case java . awt . event . key event . vk numpad 3 : return input . keys . num 3 ; case java . awt . event . key event . vk numpad 4 : return input . keys . num 4 ; case java . awt . event . key event . vk numpad 5 : return input . keys . num 5 ; case java . awt . event . key event . vk numpad 6 : return input . keys . num 6 ; case java . awt . event . key event . vk numpad 7 : return input . keys . num 7 ; case java . awt . event . key event . vk numpad 8 : return input . keys . num 8 ; case java . awt . event . key event . vk numpad 9 : return input . keys . num 9 ; } return input . keys . unknown ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { switch ( key code ) { case java . awt . event . key event . vk add : return input . keys . plus ; case java . awt . event . key event . vk subtract : return input . keys . minus ; case java . awt . event . key event . vk 0 : return input . keys . num 0 ; case java . awt . event . key event . vk 1 : return input . keys . num 1 ; case java . awt . event . key event . vk 2 : return input . keys . num 2 ; case java . awt . event . key event . vk 3 : return input . keys . num 3 ; case java . awt . event . key event . vk 4 : return input . keys . num 4 ; case java . awt . event . key event . vk 5 : return input . keys . num 5 ; case java . awt . event . key event . vk 6 : return input . keys . num 6 ; case java . awt . event . key event . vk 7 : return input . keys . num 7 ; case java . awt . event . key event . vk 8 : return input . keys . num 8 ; case java . awt . event . key event . vk 9 : return input . keys . num 9 ; case java . awt . event . key event . vk a : return input . keys . a ; case java . awt . event . key event . vk b : return input . keys . b ; case java . awt . event . key event . vk c : return input . keys . c ; case java . awt . event . key event . vk d : return input . keys . d ; case java . awt . event . key event . vk e : return input . keys . e ; case java . awt . event . key event . vk f : return input . keys . f ; case java . awt . event . key event . vk g : return input . keys . g ; case java . awt . event . key event . vk h : return input . keys . h ; case java . awt . event . key event . vk i : return input . keys . i ; case java . awt . event . key event . vk j : return input . keys . j ; case java . awt . event . key event . vk k : return input . keys . k ; case java . awt . event . key event . vk l : return input . keys . l ; case java . awt . event . key event . vk m : return input . keys . m ; case java . awt . event . key event . vk n : return input . keys . n ; case java . awt . event . key event . vk o : return input . keys . o ; case java . awt . event . key event . vk p : return input . keys . p ; case java . awt . event . key event . vk q : return input . keys . q ; case java . awt . event . key event . vk r : return input . keys . r ; case java . awt . event . key event . vk s : return input . keys . s ; case java . awt . event . key event . vk t : return input . keys . t ; case java . awt . event . key event . vk u : return input . keys . u ; case java . awt . event . key event . vk v : return input . keys . v ; case java . awt . event . key event . vk w : return input . keys . w ; case java . awt . event . key event . vk x : return input . keys . x ; case java . awt . event . key event . vk y : return input . keys . y ; case java . awt . event . key event . vk z : return input . keys . z ; case java . awt . event . key event . vk alt : return input . keys . alt left ; case java . awt . event . key event . vk alt graph : return input . keys . alt right ; case java . awt . event . key event . vk back slash : return input . keys . backslash ; case java . awt . event . key event . vk comma : return input . keys . comma ; case java . awt . event . key event . vk delete : return input . keys . forward del ; case java . awt . event . key event . vk left : return input . keys . dpad left ; case java . awt . event . key event . vk right : return input . keys . dpad right ; case java . awt . event . key event . vk up : return input . keys . dpad up ; case java . awt . event . key event . vk down : return input . keys . dpad down ; case java . 
awt . event . key event . vk enter : return input . keys . enter ; case java . awt . event . key event . vk home : return input . keys . home ; case java . awt . event . key event . vk minus : return input . keys . minus ; case java . awt . event . key event . vk period : return input . keys . period ; case java . awt . event . key event . vk plus : return input . keys . plus ; case java . awt . event . key event . vk semicolon : return input . keys . semicolon ; case java . awt . event . key event . vk shift : return input . keys . shift left ; case java . awt . event . key event . vk slash : return input . keys . slash ; case java . awt . event . key event . vk space : return input . keys . space ; case java . awt . event . key event . vk tab : return input . keys . tab ; case java . awt . event . key event . vk back space : return input . keys . del ; case java . awt . event . key event . vk control : return input . keys . control left ; case java . awt . event . key event . vk escape : return input . keys . escape ; case java . awt . event . key event . vk end : return input . keys . end ; case java . awt . event . key event . vk insert : return input . keys . insert ; case java . awt . event . key event . vk page up : return input . keys . page up ; case java . awt . event . key event . vk page down : return input . keys . page down ; case java . awt . event . key event . vk f 1 : return input . keys . f 1 ; case java . awt . event . key event . vk f 2 : return input . keys . f 2 ; case java . awt . event . key event . vk f 3 : return input . keys . f 3 ; case java . awt . event . key event . vk f 4 : return input . keys . f 4 ; case java . awt . event . key event . vk f 5 : return input . keys . f 5 ; case java . awt . event . key event . vk f 6 : return input . keys . f 6 ; case java . awt . event . key event . vk f 7 : return input . keys . f 7 ; case java . awt . event . key event . vk f 8 : return input . keys . f 8 ; case java . awt . event . key event . vk f 9 : return input . keys . f 9 ; case java . awt . event . key event . vk f 10 : return input . keys . f 10 ; case java . awt . event . key event . vk f 11 : return input . keys . f 11 ; case java . awt . event . key event . vk f 12 : return input . keys . f 12 ; case java . awt . event . key event . vk colon : return input . keys . colon ; case java . awt . event . key event . vk numpad 0 : return input . keys . num 0 ; case java . awt . event . key event . vk numpad 1 : return input . keys . num 1 ; case java . awt . event . key event . vk numpad 2 : return input . keys . num 2 ; case java . awt . event . key event . vk numpad 3 : return input . keys . num 3 ; case java . awt . event . key event . vk numpad 4 : return input . keys . num 4 ; case java . awt . event . key event . vk numpad 5 : return input . keys . num 5 ; case java . awt . event . key event . vk numpad 6 : return input . keys . num 6 ; case java . awt . event . key event . vk numpad 7 : return input . keys . num 7 ; case java . awt . event . key event . vk numpad 8 : return input . keys . num 8 ; case java . awt . event . key event . vk numpad 9 : return input . keys . num 9 ; } return input . keys . unknown ; } <SENTENCE_END/>

(Copy Probability: 3.7%)

<SENTENCE_START> { switch ( key code ) { case java . awt . event . key event . vk add : return input . keys . plus ; case java . awt . event . key event . vk subtract : return input . keys . minus ; case java . awt . event . key event . vk 0 : return input . keys . num 0 ; case java . awt . event . key event . vk 1 : return input . keys . num 1 ; case java . awt . event . key event . vk 2 : return input . keys . num 2 ; case java . awt . event . key event . vk 3 : return input . keys . num 3 ; case java . awt . event . key event . vk 4 : return input . keys . num 4 ; case java . awt . event . key event . vk 5 : return input . keys . num 5 ; case java . awt . event . key event . vk 6 : return input . keys . num 6 ; case java . awt . event . key event . vk 7 : return input . keys . num 7 ; case java . awt . event . key event . vk 8 : return input . keys . num 8 ; case java . awt . event . key event . vk 9 : return input . keys . num 9 ; case java . awt . event . key event . vk a : return input . keys . a ; case java . awt . event . key event . vk b : return input . keys . b ; case java . awt . event . key event . vk c : return input . keys . c ; case java . awt . event . key event . vk d : return input . keys . d ; case java . awt . event . key event . vk e : return input . keys . e ; case java . awt . event . key event . vk f : return input . keys . f ; case java . awt . event . key event . vk g : return input . keys . g ; case java . awt . event . key event . vk h : return input . keys . h ; case java . awt . event . key event . vk i : return input . keys . i ; case java . awt . event . key event . vk j : return input . keys . j ; case java . awt . event . key event . vk k : return input . keys . k ; case java . awt . event . key event . vk l : return input . keys . l ; case java . awt . event . key event . vk m : return input . keys . m ; case java . awt . event . key event . vk n : return input . keys . n ; case java . awt . event . key event . vk o : return input . keys . o ; case java . awt . event . key event . vk p : return input . keys . p ; case java . awt . event . key event . vk q : return input . keys . q ; case java . awt . event . key event . vk r : return input . keys . r ; case java . awt . event . key event . vk s : return input . keys . s ; case java . awt . event . key event . vk t : return input . keys . t ; case java . awt . event . key event . vk u : return input . keys . u ; case java . awt . event . key event . vk v : return input . keys . v ; case java . awt . event . key event . vk w : return input . keys . w ; case java . awt . event . key event . vk x : return input . keys . x ; case java . awt . event . key event . vk y : return input . keys . y ; case java . awt . event . key event . vk z : return input . keys . z ; case java . awt . event . key event . vk alt : return input . keys . alt left ; case java . awt . event . key event . vk alt graph : return input . keys . alt right ; case java . awt . event . key event . vk back slash : return input . keys . backslash ; case java . awt . event . key event . vk comma : return input . keys . comma ; case java . awt . event . key event . vk delete : return input . keys . forward del ; case java . awt . event . key event . vk left : return input . keys . dpad left ; case java . awt . event . key event . vk right : return input . keys . dpad right ; case java . awt . event . key event . vk up : return input . keys . dpad up ; case java . awt . event . key event . vk down : return input . keys . dpad down ; case java . 
awt . event . key event . vk enter : return input . keys . enter ; case java . awt . event . key event . vk home : return input . keys . home ; case java . awt . event . key event . vk minus : return input . keys . minus ; case java . awt . event . key event . vk period : return input . keys . period ; case java . awt . event . key event . vk plus : return input . keys . plus ; case java . awt . event . key event . vk semicolon : return input . keys . semicolon ; case java . awt . event . key event . vk shift : return input . keys . shift left ; case java . awt . event . key event . vk slash : return input . keys . slash ; case java . awt . event . key event . vk space : return input . keys . space ; case java . awt . event . key event . vk tab : return input . keys . tab ; case java . awt . event . key event . vk back space : return input . keys . del ; case java . awt . event . key event . vk control : return input . keys . control left ; case java . awt . event . key event . vk escape : return input . keys . escape ; case java . awt . event . key event . vk end : return input . keys . end ; case java . awt . event . key event . vk insert : return input . keys . insert ; case java . awt . event . key event . vk page up : return input . keys . page up ; case java . awt . event . key event . vk page down : return input . keys . page down ; case java . awt . event . key event . vk f 1 : return input . keys . f 1 ; case java . awt . event . key event . vk f 2 : return input . keys . f 2 ; case java . awt . event . key event . vk f 3 : return input . keys . f 3 ; case java . awt . event . key event . vk f 4 : return input . keys . f 4 ; case java . awt . event . key event . vk f 5 : return input . keys . f 5 ; case java . awt . event . key event . vk f 6 : return input . keys . f 6 ; case java . awt . event . key event . vk f 7 : return input . keys . f 7 ; case java . awt . event . key event . vk f 8 : return input . keys . f 8 ; case java . awt . event . key event . vk f 9 : return input . keys . f 9 ; case java . awt . event . key event . vk f 10 : return input . keys . f 10 ; case java . awt . event . key event . vk f 11 : return input . keys . f 11 ; case java . awt . event . key event . vk f 12 : return input . keys . f 12 ; case java . awt . event . key event . vk colon : return input . keys . colon ; case java . awt . event . key event . vk numpad 0 : return input . keys . num 0 ; case java . awt . event . key event . vk numpad 1 : return input . keys . num 1 ; case java . awt . event . key event . vk numpad 2 : return input . keys . num 2 ; case java . awt . event . key event . vk numpad 3 : return input . keys . num 3 ; case java . awt . event . key event . vk numpad 4 : return input . keys . num 4 ; case java . awt . event . key event . vk numpad 5 : return input . keys . num 5 ; case java . awt . event . key event . vk numpad 6 : return input . keys . num 6 ; case java . awt . event . key event . vk numpad 7 : return input . keys . num 7 ; case java . awt . event . key event . vk numpad 8 : return input . keys . num 8 ; case java . awt . event . key event . vk numpad 9 : return input . keys . num 9 ; } return input . keys . unknown ; } <SENTENCE_END/>
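
The switch above, shown only in tokenized form, translates AWT key codes into libGDX Input.Keys constants. Below is an abbreviated, de-tokenized reading for reference only: the parameter name keyCode comes from the tokens, but the method name translateKeyCode is a placeholder (the original name of this entry is not visible in this excerpt), and all but a few representative cases are elided.

import java.awt.event.KeyEvent;
import com.badlogic.gdx.Input;

// Hypothetical signature; the original method name is not shown in this excerpt.
static int translateKeyCode (int keyCode) {
    switch (keyCode) {
        case KeyEvent.VK_ADD:      return Input.Keys.PLUS;
        case KeyEvent.VK_SUBTRACT: return Input.Keys.MINUS;
        case KeyEvent.VK_0:        return Input.Keys.NUM_0;
        case KeyEvent.VK_A:        return Input.Keys.A;
        case KeyEvent.VK_ENTER:    return Input.Keys.ENTER;
        case KeyEvent.VK_ESCAPE:   return Input.Keys.ESCAPE;
        // ... the original body continues in the same one-case-per-key style
        // through the full letter, digit, numpad, F-key, and modifier set ...
    }
    return Input.Keys.UNKNOWN;
}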


Original Name show,cursor

show

<SENTENCE_START> { if ( ! visible ) { toolkit t = toolkit . get default toolkit ( ) ; image i = new buffered image ( 1 , 1 , buffered image . type int argb ) ; cursor no cursor = t . create custom cursor ( i , new point ( 0 , 0 ) , "none" ) ; j frame frame = find j frame ( canvas ) ; frame . set cursor ( no cursor ) ; } else { j frame frame = find j frame ( canvas ) ; frame . set cursor ( cursor . get default cursor ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 4.9%)

<SENTENCE_START> { if ( ! visible ) { toolkit t = toolkit . get default toolkit ( ) ; image i = new buffered image ( 1 , 1 , buffered image . type int argb ) ; cursor no cursor = t . create custom cursor ( i , new point ( 0 , 0 ) , "none" ) ; j frame frame = find j frame ( canvas ) ; frame . set cursor ( no cursor ) ; } else { j frame frame = find j frame ( canvas ) ; frame . set cursor ( cursor . get default cursor ( ) ) ; } } <SENTENCE_END/>

cursor

<SENTENCE_START> { if ( ! visible ) { toolkit t = toolkit . get default toolkit ( ) ; image i = new buffered image ( 1 , 1 , buffered image . type int argb ) ; cursor no cursor = t . create custom cursor ( i , new point ( 0 , 0 ) , "none" ) ; j frame frame = find j frame ( canvas ) ; frame . set cursor ( no cursor ) ; } else { j frame frame = find j frame ( canvas ) ; frame . set cursor ( cursor . get default cursor ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 25.0%)

<SENTENCE_START> { if ( ! visible ) { toolkit t = toolkit . get default toolkit ( ) ; image i = new buffered image ( 1 , 1 , buffered image . type int argb ) ; cursor no cursor = t . create custom cursor ( i , new point ( 0 , 0 ) , "none" ) ; j frame frame = find j frame ( canvas ) ; frame . set cursor ( no cursor ) ; } else { j frame frame = find j frame ( canvas ) ; frame . set cursor ( cursor . get default cursor ( ) ) ; } } <SENTENCE_END/>

%END%

<SENTENCE_START> { if ( ! visible ) { toolkit t = toolkit . get default toolkit ( ) ; image i = new buffered image ( 1 , 1 , buffered image . type int argb ) ; cursor no cursor = t . create custom cursor ( i , new point ( 0 , 0 ) , "none" ) ; j frame frame = find j frame ( canvas ) ; frame . set cursor ( no cursor ) ; } else { j frame frame = find j frame ( canvas ) ; frame . set cursor ( cursor . get default cursor ( ) ) ; } } <SENTENCE_END/>

(Copy Probability: 8.7%)

<SENTENCE_START> { if ( ! visible ) { toolkit t = toolkit . get default toolkit ( ) ; image i = new buffered image ( 1 , 1 , buffered image . type int argb ) ; cursor no cursor = t . create custom cursor ( i , new point ( 0 , 0 ) , "none" ) ; j frame frame = find j frame ( canvas ) ; frame . set cursor ( no cursor ) ; } else { j frame frame = find j frame ( canvas ) ; frame . set cursor ( cursor . get default cursor ( ) ) ; } } <SENTENCE_END/>
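
For reference, a plausible de-tokenized reading of the show,cursor body above (a Swing/AWT cursor toggle of the kind used in libGDX's AWT-backed containers). The visibility modifier and parameter type are assumptions, as is the fact that canvas is a field of the enclosing class; findJFrame is the helper reconstructed in the next entry.

import java.awt.Cursor;
import java.awt.Image;
import java.awt.Point;
import java.awt.Toolkit;
import java.awt.image.BufferedImage;
import javax.swing.JFrame;

void showCursor (boolean visible) {
    if (!visible) {
        // Hide the cursor by installing a fully transparent 1x1 custom cursor.
        Toolkit t = Toolkit.getDefaultToolkit();
        Image i = new BufferedImage(1, 1, BufferedImage.TYPE_INT_ARGB);
        Cursor noCursor = t.createCustomCursor(i, new Point(0, 0), "none");
        JFrame frame = findJFrame(canvas);  // canvas: field of the enclosing class (assumed)
        frame.setCursor(noCursor);
    } else {
        // Restore the platform default cursor.
        JFrame frame = findJFrame(canvas);
        frame.setCursor(Cursor.getDefaultCursor());
    }
}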


Original Name find,j,frame

find

<SENTENCE_START> { container parent = component . get parent ( ) ; while ( parent != null ) { if ( parent instanceof j frame ) { return ( j frame ) parent ; } parent = parent . get parent ( ) ; } return null ; } <SENTENCE_END/>

(Copy Probability: 1.8%)

<SENTENCE_START> { container parent = component . get parent ( ) ; while ( parent != null ) { if ( parent instanceof j frame ) { return ( j frame ) parent ; } parent = parent . get parent ( ) ; } return null ; } <SENTENCE_END/>

j

<SENTENCE_START> { container parent = component . get parent ( ) ; while ( parent != null ) { if ( parent instanceof j frame ) { return ( j frame ) parent ; } parent = parent . get parent ( ) ; } return null ; } <SENTENCE_END/>

(Copy Probability: 14.7%)

<SENTENCE_START> { container parent = component . get parent ( ) ; while ( parent != null ) { if ( parent instanceof j frame ) { return ( j frame ) parent ; } parent = parent . get parent ( ) ; } return null ; } <SENTENCE_END/>

frame

<SENTENCE_START> { container parent = component . get parent ( ) ; while ( parent != null ) { if ( parent instanceof j frame ) { return ( j frame ) parent ; } parent = parent . get parent ( ) ; } return null ; } <SENTENCE_END/>

(Copy Probability: 3.8%)

<SENTENCE_START> { container parent = component . get parent ( ) ; while ( parent != null ) { if ( parent instanceof j frame ) { return ( j frame ) parent ; } parent = parent . get parent ( ) ; } return null ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { container parent = component . get parent ( ) ; while ( parent != null ) { if ( parent instanceof j frame ) { return ( j frame ) parent ; } parent = parent . get parent ( ) ; } return null ; } <SENTENCE_END/>

(Copy Probability: 3.3%)

<SENTENCE_START> { container parent = component . get parent ( ) ; while ( parent != null ) { if ( parent instanceof j frame ) { return ( j frame ) parent ; } parent = parent . get parent ( ) ; } return null ; } <SENTENCE_END/>
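
The find,j,frame body above walks up the AWT containment hierarchy until it reaches a JFrame. A plausible de-tokenized reading follows; the static modifier and the Component parameter type are assumptions, while the parameter name component and the traversal logic come directly from the tokens.

import java.awt.Component;
import java.awt.Container;
import javax.swing.JFrame;

static JFrame findJFrame (Component component) {
    Container parent = component.getParent();
    while (parent != null) {
        if (parent instanceof JFrame) {
            return (JFrame) parent;  // first JFrame ancestor wins
        }
        parent = parent.getParent();
    }
    return null;  // no JFrame in the ancestor chain
}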


Original Name load,model

load

<SENTENCE_START> { return %SELF% ( file handle , new obj loader parameters ( flip v ) ) ; } <SENTENCE_END/>

(Copy Probability: 0.8%)

<SENTENCE_START> { return %SELF% ( file handle , new obj loader parameters ( flip v ) ) ; } <SENTENCE_END/>

model

<SENTENCE_START> { return %SELF% ( file handle , new obj loader parameters ( flip v ) ) ; } <SENTENCE_END/>

(Copy Probability: 3.1%)

<SENTENCE_START> { return %SELF% ( file handle , new obj loader parameters ( flip v ) ) ; } <SENTENCE_END/>

%END%

<SENTENCE_START> { return %SELF% ( file handle , new obj loader parameters ( flip v ) ) ; } <SENTENCE_END/>

(Copy Probability: 2.8%)

<SENTENCE_START> { return %SELF% ( file handle , new obj loader parameters ( flip v ) ) ; } <SENTENCE_END/>
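
The load,model body above is a one-line delegating overload: %SELF% stands for the method's own name, so the call forwards to a fuller loadModel overload that takes explicit loader parameters. A plausible de-tokenized reading, assuming a libGDX ObjLoader-style context (the return type Model, the FileHandle and boolean parameter types, and the public modifier are assumptions; ObjLoaderParameters is taken from the tokens and assumed to be a nested class of the same loader):

import com.badlogic.gdx.files.FileHandle;
import com.badlogic.gdx.graphics.g3d.Model;

public Model loadModel (FileHandle fileHandle, boolean flipV) {
    // Delegate to the overload that takes explicit loader parameters.
    return loadModel(fileHandle, new ObjLoaderParameters(flipV));
}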