#### This code is for a face manipulation experiment.
#### The face on screen is the subject's own face or an average face; it moves and receives
#### visual and tactile information synchronously or asynchronously with the real face movement.
#### The face is smiling or neutral. Pre and post manipulation the affect grid and IOS are
#### measured, and post manipulation questionnaires are filled in.
import viz
import vizact   # NOTE(review): used below (onkeydown/ontimer) but was never imported in the original
import hand
import viztask
import vizinput
import numpy
import vizmat
from numpy import binary_repr
import vizfx.postprocess
from vizfx.postprocess.color import ContrastEffect

########################################## add models #####################################
#### Debugging variable to speed up the experiment (1 = real durations)
debugging = 1

#### Definition of the questions and the question-instructions
questions4neutralface = ["I felt like the face on the screen was my own face",
    "It seemed like I was looking at my own reflection\nin a mirror",
    "It seemed like I were sensing the movement\nand the touch on my face in the location where\nthe face on the screen was",
    "It seemed like the touch I felt on my face was caused\nby the ball touching the face on the screen",
    "It seemed like the face on the screen began to\nresemble my own face",
    "It seemed like my own face began to resemble\nthe face on the screen",
    # several agency questions; refer to literature dissociating agency and ownership
    "It seemed as though the movement I did\nwas caused by the face on the screen",
    "It seemed as though the movement\nI saw on the face on the screen was caused\nby my own movement",
    "The face on the screen moved just like I wanted\nit to, as if it was obeying my will",
    "Whenever I moved my face, I expected the face\non the screen to move in the same way",
    "It seemed like my own face was out of my control",
    "It seemed the face on the screen had a will\nof its own",
    # feeling questions
    "I felt I am happier than I was before the manipulation"]

questions4happyface = ["I felt like the face on the screen was my own face",
    "It seemed like I was looking at my own reflection\nin a mirror",
    "It seemed like I were sensing the movement\nand the touch on my face in the location where\nthe face on the screen was",
    "It seemed like the touch I felt on my face was caused\nby the ball touching the face on the screen",
    "It seemed like the face on the screen began to\nresemble my own face",
    "It seemed like my own face began to resemble\nthe face on the screen",
    # several agency questions; refer to literature dissociating agency and ownership
    "It seemed as though the movement I did\nwas caused by the face on the screen",
    "It seemed as though the movement\nI saw on the face on the screen was caused\nby my own movement",
    "The face on the screen moved just like I wanted\nit to, as if it was obeying my will",
    "Whenever I moved my face, I expected the face\non the screen to move in the same way",
    "It seemed like my own face was out of my control",
    "It seemed the face on the screen had a will\nof its own",
    # feeling questions
    "I felt I am happier than before the manipulation",
    "I felt that I smiled so the face on the screen smiled",
    "I felt that the face on the screen smiled so I smiled"]

qinstructions = '\n' + 'Please select a number between 1 and 7' + '\n\n' + '1 means you disagree completely\n2 means you disagree\n3 means you disagree slightly\n4 means you are not sure\n5 means you agree slightly\n6 means you agree\n7 means you agree completely'

#### similarity: 0 = my face (H); 1 = average face (S)
#### synchronicity: 0 = sync (S); 1 = async (AS)
#### event: 0 = no smile; 1 = smile
subjectnumberstr = vizinput.input('What is your subject number?')
subjectnumber = int(subjectnumberstr)

#### gender: 0 = male, 1 = female (index returned by vizinput.choose)
gender = vizinput.choose('Select your gender', ['male', 'female'])
gendergender = 'male' if gender == 0 else 'female'

#### Condition assignment, counterbalanced by subject number modulo 8.
#### Each entry: (similarity, smileornot, synchronicitys order)
_CONDITIONS = {
    1: (0, 0, [0, 1]),  # my face - no smile - sync first
    2: (0, 0, [1, 0]),  # my face - no smile - async first
    3: (0, 1, [0, 1]),  # my face - smile - sync first
    4: (0, 1, [1, 0]),  # my face - smile - async first
    5: (1, 0, [0, 1]),  # average face - no smile - sync first
    6: (1, 0, [1, 0]),  # average face - no smile - async first
    7: (1, 1, [0, 1]),  # average face - smile - sync first
    0: (1, 1, [1, 0]),  # average face - smile - async first
}
similarity, smileornot, synchronicitys = _CONDITIONS[subjectnumber % 8]


def _attach_hat(head, hatscale, offset, euler):
    """Attach the blue hat model to a head node and return the hat node.

    hatscale/offset/euler: scale of the hat and the pre-transform of the
    head->hat link (offsets differ per head model).
    """
    hat = viz.addChild('art/hat 2.dae')
    hat.setScale(hatscale[0], hatscale[1], hatscale[2])
    hat.setPosition(0, 0, 0)
    hat.color([0.3, 0.3, 0.6])
    link = viz.link(head, hat)
    link.preTrans(offset)
    link.preEuler(euler)
    return hat


#### add the face used in the experiment
if similarity == 0:
    #### subject's own face (scanned model)
    if smileornot == 0:
        headinexp = viz.add('art/participant/participant1_1.WRL')
    else:
        headinexp = viz.add('art/participant/participant1_smile.WRL')
    headinexp.setScale(0.0004, 0.0004, 0.0004)
    _hatoffset = [0.01, 0.33, -0.04]
    _contrast = 0
else:
    #### average face, matched to the participant's gender
    if gender == 0:
        if smileornot == 0:
            headinexp = viz.add('art/belgianman/belgianman001.WRL')
        else:
            headinexp = viz.add('art/belgianman/belgianman001_smile.WRL')
    else:
        if smileornot == 0:
            headinexp = viz.add('art/belgianwoman/belgianwoman1.WRL')
        else:
            headinexp = viz.add('art/belgianwoman/belgianwoman1_smile.WRL')
    headinexp.setScale(0.0011, 0.0011, 0.0011)
    _hatoffset = [0, 0.33, -0.02]
    _contrast = 0.3

headinexp.setPosition(0, 0, 0)
headinexp.setEuler(180, 0, 0)
hatinexp = _attach_hat(headinexp, [0.043, 0.043, 0.06], _hatoffset, [180, 0, 0])
#### a stronger contrast is applied to the average face (0.3) than to the own face (0)
effect = ContrastEffect(_contrast)
vizfx.postprocess.addEffect(effect)
headinexp.visible(viz.OFF)
hatinexp.visible(viz.OFF)

#### add the face (textured ball) used in practice
headinpractice = viz.add('ball.wrl')
ballpic = viz.addTexture('art/pictexture.jpg')
headinpractice.texture(ballpic)
headinpractice.setScale(0.7, 1, 1)
headinpractice.setPosition(0, 0, 0)
headinpractice.setEuler(180, 0, 0)
hatinpractice = _attach_hat(headinpractice, [0.04, 0.04, 0.055], [0, 0.33, 0.05], [180, -10, 0])
headinpractice.visible(viz.OFF)
hatinpractice.visible(viz.OFF)

#### add the virtual right hand (small blue ball)
righthand = viz.add('white_ball.wrl', scale=[0.7, 0.7, 0.7], color=[0, 0.5, 1])
righthand.visible(viz.OFF)

#### add Kinect (FAAST via VRPN): skeleton sensor indices for head and right hand
HEAD = 0
RIGHTHAND = 14
vrpn = viz.addExtension('vrpn7.dle')
thead = vrpn.addTracker('Tracker0@localhost', HEAD)
trighthand = vrpn.addTracker('Tracker0@localhost', RIGHTHAND)

#### add InterSense orientation tracker for the head
isense = viz.add('intersense.dle')
tracker_head = isense.addTracker(port=7)  # if changed, remember to change the port number
vizact.onkeydown(' ', tracker_head.reset)

#### Preparing the subject's logging file (truncates any existing file for this subject)
with open('response ' + str(subjectnumber) + '_' + gendergender + '.txt', 'w'):
    pass

#### Defining the on-screen instruction text.
#### NOTE: the loop also leaves a module-level `text` bound to the instructions node,
#### which some task functions rely on.
text_dict = {}
for kind in ['instructions']:
    text = viz.addText('', viz.SCREEN)
    text.setScale(.4, .4)
    text.alignment(viz.TEXT_LEFT_TOP)
    text_dict[kind] = text
text_dict['instructions'].setPosition(0.07, 0.93)

####################################### START ###########################################
viz.setMultiSample(4)
viz.fov(60)
viz.go(viz.FULLSCREEN)
viz.mouse(viz.OFF)
#viz.go()
######################################## Tasks ######################################
def instructions():
    """Show the opening instructions, then familiarize the participant with the
    affect grid and the IOS (Inclusion of Other in Self) scale."""
    text = text_dict['instructions']
    text.message('In the following task, you will try face manipulation.\nPlease press space to go on')
    yield viztask.waitKeyDown(' ')
    text.message('Please wear the hat, press space to go on\nthen please listen to instructions from experimenter')
    yield viztask.waitKeyDown(' ')
    # affect grid preview
    screen = viz.addTexQuad(pos=[0, 0, 0], scale=[0.7, 0.7, 0.7], euler=[0, 0, 0])
    backpicture = viz.addTexture('affectgrid-1.jpg')
    screen.texture(backpicture)
    text.message('How\ndo\nyou\nfeel\nnow?\n\n\nPlease\npress\nspace\nto\ngo on')
    yield viztask.waitKeyDown(' ')
    screen.alpha(0)
    # IOS scale preview
    screen = viz.addTexQuad(pos=[0, 0, 0], scale=[0.4, 0.73, 0.7], euler=[0, 0, 0])
    backpicture = viz.addTexture('IOS.jpg')
    screen.texture(backpicture)
    text.message('Self\nother\nCloseness.\n\n\nPlease\npress\nspace\nto\ngo on')
    yield viztask.waitKeyDown(' ')
    screen.alpha(0)
    text.message('So we start the practice.\nPlease press space to go on')
    yield viztask.waitKeyDown(' ')
    text.message('')


def synchronousmove(block, headinuse, hat):
    """Synchronous condition: the on-screen head follows the tracked head in real time.

    block: -1 for practice, 1 for the experiment (selects the phase duration comments;
           both currently advance on a space press).
    headinuse/hat: the head and hat nodes to animate and show.
    """
    text = text_dict['instructions']
    text.message('Please keep looking ahead and then press space key')
    yield viztask.waitKeyDown(' ')
    text.message('')

    ###### head euler from the InterSense orientation tracker, applied immediately
    def gettrackereuler():
        yaw2, pitch2, roll2 = tracker_head.getEuler()
        # fixed offsets calibrate the tracker frame to the on-screen head frame
        markpitch2 = pitch2 + 40
        markyaw2 = 180 - yaw2
        markroll2 = -roll2 + 10
        # if (similarity == 0):
        #     markroll2 = -roll2 + 7
        raweuler2 = [markyaw2, markpitch2, markroll2]
        headinuse.setEuler(raweuler2)
    vibratetimer_gettrackereuler = vizact.ontimer(0, gettrackereuler)

    ###### head / right-hand position from the Kinect (FAAST via VRPN), applied immediately
    def trackheadrighthandpotion():
        pos_righthand = trighthand.getPosition()
        pos_righthand[1] = pos_righthand[1] - 0.5
        pos_righthand[2] = pos_righthand[2] - 1.2
        righthand.setPosition(pos_righthand)
        pos_head = thead.getPosition()
        pos_head[1] = pos_head[1] - 0.5
        pos_head[2] = pos_head[2] - 1.2
        headinuse.setPosition(pos_head)
    timer_headrighthandpotion = vizact.ontimer(0, trackheadrighthandpotion)

    ###### phase 1: free head movement
    text.message('You can rotate or move your head freely')
    headinuse.visible(viz.ON)
    hat.visible(viz.ON)
    if block == -1:
        # yield viztask.waitTime( 0.5*60*debugging ) ####in practice, 0.5*60 seconds
        yield viztask.waitKeyDown(' ')
    if block == 1:
        # yield viztask.waitTime( 3*60*debugging ) ####in exp, 3*60 seconds
        yield viztask.waitKeyDown(' ')
    headinuse.visible(viz.OFF)
    hat.visible(viz.OFF)

    ###### phase 2: repeated right-hand-to-cheek touches (visuotactile stimulation)
    text.message('Please move your right hand to right and back\nto touch your right cheek repeatedly\nPlease press space to check the movement on the screen')
    yield viztask.waitKeyDown(' ')
    text.message('Please keep moving your right hand to right and back\nto touch your right cheek repeatedly')
    headinuse.visible(viz.ON)
    hat.visible(viz.ON)
    righthand.visible(viz.ON)
    if block == -1:
        # yield viztask.waitTime( 0.5*60*debugging ) ####in practice, 0.5*60 seconds
        yield viztask.waitKeyDown(' ')
    if block == 1:
        # yield viztask.waitTime( 3*60*debugging ) ####in exp, 3*60 seconds
        yield viztask.waitKeyDown(' ')

    ###### after the block, stop the tracking timers and hide everything
    vibratetimer_gettrackereuler.setEnabled(viz.OFF)
    timer_headrighthandpotion.setEnabled(viz.OFF)
    headinuse.visible(viz.OFF)
    hat.visible(viz.OFF)
    righthand.visible(viz.OFF)


def asynchronousmove(block, headinuse, hat):
    """Asynchronous condition: head orientation/position are replayed through a
    300-sample FIFO delay buffer, and the virtual right hand is mirrored in x,
    decoupling the seen movement from the felt movement.

    Same phases and parameters as synchronousmove.
    """
    text = text_dict['instructions']
    text.message('Please keep looking ahead and then press space key')
    yield viztask.waitKeyDown(' ')
    text.message('')

    ###### head euler from the orientation tracker, buffered for a 300-sample delay
    start_euler = [180, 0, 0]
    Data_delay_eul2 = [start_euler] * 300
    def gettrackereuler():
        yaw2, pitch2, roll2 = tracker_head.getEuler()
        markpitch2 = pitch2 + 40
        markyaw2 = 180 - yaw2
        markroll2 = -roll2 + 10
        # if (similarity == 0):
        #     markroll2 = -roll2 + 7
        raweuler2 = [markyaw2, markpitch2, markroll2]
        Data_delay_eul2.append(raweuler2)
        neweuler2 = Data_delay_eul2.pop(0)
        headinuse.setEuler(neweuler2)
    vibratetimer_gettrackereuler = vizact.ontimer(0, gettrackereuler)

    ###### head position from the Kinect, buffered; right hand mirrored left/right
    start_pos_head = thead.getPosition()
    start_pos_head[1] = start_pos_head[1] - 0.5
    start_pos_head[2] = start_pos_head[2] - 1.2
    Data_delay_pos_head = [start_pos_head] * 300
    def trackheadrighthandpotion():
        pos_righthand = trighthand.getPosition()
        pos_righthand[1] = pos_righthand[1] - 0.5
        pos_righthand[2] = pos_righthand[2] - 1.2
        pos_head = thead.getPosition()
        pos_head[1] = pos_head[1] - 0.5
        pos_head[2] = pos_head[2] - 1.2
        # checkdistance = vizmat.Distance(pos_righthand[0], pos_head[0])  # debug only; print was disabled
        newpose_righthand = pos_righthand
        newpose_righthand[0] = 0.67 - pos_righthand[0]  # mirror hand around x = 0.335
        righthand.setPosition(newpose_righthand)
        Data_delay_pos_head.append(pos_head)
        newpose_head = Data_delay_pos_head.pop(0)
        headinuse.setPosition(newpose_head)
    timer_headrighthandpotion = vizact.ontimer(0, trackheadrighthandpotion)

    ###### phase 1: free head movement
    text.message('You can rotate or move your head freely')
    headinuse.visible(viz.ON)
    hat.visible(viz.ON)
    if block == -1:
        # yield viztask.waitTime( 0.5*60*debugging ) ####in practice, 0.5*60 seconds
        yield viztask.waitKeyDown(' ')
    if block == 1:
        # yield viztask.waitTime( 3*60*debugging ) ####in exp, 3*60 seconds
        yield viztask.waitKeyDown(' ')
    headinuse.visible(viz.OFF)
    hat.visible(viz.OFF)

    ###### phase 2: repeated right-hand-to-cheek touches
    text.message('Please move your right hand to right and back\nto touch your right cheek repeatedly\nPlease press space to check the movement on the screen')
    yield viztask.waitKeyDown(' ')
    text.message('Please keep moving your right hand to right and back\nto touch your right cheek repeatedly')
    headinuse.visible(viz.ON)
    hat.visible(viz.ON)
    righthand.visible(viz.ON)
    if block == -1:
        # yield viztask.waitTime( 0.5*60*debugging ) ####in practice, 0.5*60 seconds
        yield viztask.waitKeyDown(' ')
    if block == 1:
        # yield viztask.waitTime( 3*60*debugging ) ####in exp, 3*60 seconds
        yield viztask.waitKeyDown(' ')

    ###### after the block, stop the tracking timers and hide everything
    vibratetimer_gettrackereuler.setEnabled(viz.OFF)
    timer_headrighthandpotion.setEnabled(viz.OFF)
    headinuse.visible(viz.OFF)
    hat.visible(viz.OFF)
    righthand.visible(viz.OFF)


def practice(block):
    """Practice: one synchronous and one asynchronous block with the textured ball head."""
    text = text_dict['instructions']
    text.message('In Practice 1 & 2: \nYou will see a color ball on the screen, please look at\nthis ball and freely move or rotate your head.\nThis section will lasts for 2 minute.\nPlease press space key to move to Practice 1.')
    yield viztask.waitKeyDown(' ')
    yield synchronousmove(block, headinpractice, hatinpractice)
    text.message('practice 1 is finished,\nplease press space key to move to Practice 2')
    yield viztask.waitKeyDown(' ')
    yield asynchronousmove(block, headinpractice, hatinpractice)
    text.message('practice 2 is finished,\nplease press space key to move to Experiment')
    yield viztask.waitKeyDown(' ')


def game_illusion(block, synchronicity):
    """Main illusion block.

    block == 0: show the static experiment face only.
    block > 0: run the moving face, synchronously (synchronicity == 0) or
               asynchronously (synchronicity == 1).
    """
    text = text_dict['instructions']
    if block == 0:
        text.message('Please look at this face on the screen,\ndo not move your head.\nThis section will lasts for 1 minutes.\nPlease press space to go on')
    if block == 1:
        text.message('Please look at this face on the screen,\nkeep rotating or moving you face.\nThis section will lasts for several minutes.\nPlease press space to go on')
    yield viztask.waitKeyDown(' ')
    text.message('')
    if block == 0:
        headinexp.visible(viz.ON)
        hatinexp.visible(viz.ON)
        # yield viztask.waitTime( 1*60*debugging )
        yield viztask.waitKeyDown(' ')
        headinexp.visible(viz.OFF)
        hatinexp.visible(viz.OFF)
        text.message('Please press space to do the tasks')
        yield viztask.waitKeyDown(' ')
    if block > 0:
        if synchronicity == 0:
            yield synchronousmove(block, headinexp, hatinexp)
        if synchronicity == 1:
            yield asynchronousmove(block, headinexp, hatinexp)
        text.message('Please press space to do the tasks')
        yield viztask.waitKeyDown(' ')


def measureaffectgrid(block):
    """Show the affect grid for the given phase and log one column letter (a-i)
    and one row digit (1-9) to the response file.

    block selects the grid image: -1 baseline, 0 after practice/static face,
    1 after an experiment block, 2 after the AUT.
    """
    screen = viz.addTexQuad(pos=[0, 0, 0], scale=[0.7, 0.7, 0.7], euler=[0, 0, 0])
    screen.alpha(1)
    file = open('response ' + str(subjectnumber) + '_' + gendergender + '.txt', 'a')
    text = text_dict['instructions']
    file.write(str(subjectnumber) + '\t' + 'afg' + '\t' + str(block) + '\t')  # line prefix: PP, measure, phase
    if block == -1:   #### pre (baseline); NOTE(review): image name case differs from 'affectgrid-1.jpg' used in instructions() -- harmless on Windows, verify on case-sensitive filesystems
        backpicture = viz.addTexture('Affectgrid-1.jpg')
    if block == 0:    #### pre
        backpicture = viz.addTexture('Affectgrid0.jpg')
    if block == 1:    #### post
        backpicture = viz.addTexture('Affectgrid1.jpg')
    if block == 2:    #### post
        backpicture = viz.addTexture('Affectgrid2.jpg')
    screen.texture(backpicture)
    text.message('How\ndo\nyou\nfeel\nnow?')

    def SaveData(key):
        # records the last key pressed; the waits below gate on valid keys only
        global out
        out = key + '\t'

    viz.callback(viz.KEYBOARD_EVENT, SaveData)  #### Creating the call-back
    #### wait for the column letter; only a-i are accepted
    yield viztask.waitAny([viztask.waitKeyDown('a'), viztask.waitKeyDown('b'), viztask.waitKeyDown('c'), viztask.waitKeyDown('d'), viztask.waitKeyDown('e'), viztask.waitKeyDown('f'), viztask.waitKeyDown('g'), viztask.waitKeyDown('h'), viztask.waitKeyDown('i')])
    file.write(out)
    viz.callback(viz.KEYBOARD_EVENT, None)  #### Deleting the call-back

    viz.callback(viz.KEYBOARD_EVENT, SaveData)  #### Creating the call-back
    #### wait for the row digit; only 1-9 are accepted
    yield viztask.waitAny([viztask.waitKeyDown('1'), viztask.waitKeyDown('2'), viztask.waitKeyDown('3'), viztask.waitKeyDown('4'), viztask.waitKeyDown('5'), viztask.waitKeyDown('6'), viztask.waitKeyDown('7'), viztask.waitKeyDown('8'), viztask.waitKeyDown('9')])
    file.write(out)
    viz.callback(viz.KEYBOARD_EVENT, None)  #### Deleting the call-back

    file.write('\n\n')
    file.close()
    text.message('')
    screen.alpha(0)


def measureios(block):
    """Show the IOS (Inclusion of Other in Self) scale and log one 1-7 response."""
    screen = viz.addTexQuad(pos=[0, 0, 0], scale=[0.4, 0.73, 0.7], euler=[0, 0, 0])
    screen.alpha(1)
    file = open('response ' + str(subjectnumber) + '_' + gendergender + '.txt', 'a')
    text = text_dict['instructions']
    file.write(str(subjectnumber) + '\t' + 'ios' + '\t' + str(block) + '\t')  # line prefix: PP, measure, phase
    backpicture = viz.addTexture('IOS.jpg')
    screen.texture(backpicture)
    text.message('Self\nother\nCloseness.\n\n\nPlease select\nthe circle\nshows the\nrelationship\nbetween\nyou and\nthe face\nyou saw\non the screen')

    def SaveData(key):
        global out
        out = key + '\t'

    viz.callback(viz.KEYBOARD_EVENT, SaveData)  #### Creating the call-back
    #### wait for a valid key press; only 1-7 are accepted
    yield viztask.waitAny([viztask.waitKeyDown('1'), viztask.waitKeyDown('2'), viztask.waitKeyDown('3'), viztask.waitKeyDown('4'), viztask.waitKeyDown('5'), viztask.waitKeyDown('6'), viztask.waitKeyDown('7')])
    viz.callback(viz.KEYBOARD_EVENT, None)  #### Deleting the call-back
    file.write(out)
    file.write('\n\n')
    file.close()
    text.message('')
    screen.alpha(0)
#### Commented-out self-evaluation measure (ME), kept for reference.
# def measureME(block):
#     screen = viz.addTexQuad(pos=[0,0,0], scale=[0.4,0.73,0.7], euler=[0,0,0])
#     screen.alpha(1)
#     file = open('response ' + str(subjectnumber) + '_' + gendergender + '.txt', 'a')
#     text = text_dict['instructions']
#     file.write(str(subjectnumber) + '\t' + 'ME' + '\t' + str(block) + '\t')
#     text.message('Self\nevaluation\nselect.\n\n\n')
#     def SaveData(key):
#         global out
#         out = key + '\t'
#     viz.callback(viz.KEYBOARD_EVENT, SaveData)
#     yield viztask.waitAny([viztask.waitKeyDown('1'), viztask.waitKeyDown('2'), viztask.waitKeyDown('3'), viztask.waitKeyDown('4'), viztask.waitKeyDown('5'), viztask.waitKeyDown('6'), viztask.waitKeyDown('7')])
#     viz.callback(viz.KEYBOARD_EVENT, None)
#     file.write(out)
#     file.write('\n\n')
#     file.close()
#     text.message('')
#     screen.alpha(0)


def measureaut():
    """Paper-based Alternate Uses Task; play a buzzer when the time is up.

    NOTE(review): waits 1*debugging seconds, not the five minutes announced in
    the on-screen text -- confirm the intended duration.
    """
    text = text_dict['instructions']
    text.message('Now you can ask the experimenter for materials.\nYou have five minutes to fill it.\nWhen you hear a sound, please stop.')
    yield viztask.waitTime(1 * debugging)
    sound = viz.addAudio('BUZZER.WAV')
    sound.play()
    text.message('Now you can give the material to the experimenter\nand please press space key to continue.')
    yield viztask.waitKeyDown(' ')
    sound.stop()
    text.message('')


def showquestion(question, qnumber):
    """Display one questionnaire item and wait for a 1-7 rating.

    The rating is stored in the module-level `out` (written by the caller);
    qnumber is currently informational only.
    """
    # (the original opened the log file here without writing or closing it --
    #  the unused handle leaked; the caller owns the file, so the open is removed)
    text = text_dict['instructions']
    text.message(question)

    def SaveData(key):
        global out
        out = key + '\t'

    viz.callback(viz.KEYBOARD_EVENT, SaveData)  #### Creating the call-back
    #### wait for a valid key press; only 1-7 are accepted
    yield viztask.waitAny([viztask.waitKeyDown('1'), viztask.waitKeyDown('2'), viztask.waitKeyDown('3'), viztask.waitKeyDown('4'), viztask.waitKeyDown('5'), viztask.waitKeyDown('6'), viztask.waitKeyDown('7')])
    viz.callback(viz.KEYBOARD_EVENT, None)  #### Deleting the call-back


def questionnaire(similarity, synchronicity, smileornot):
    """Run the post-block questionnaire and append one line of ratings to the log.

    Questions are deliberately NOT shuffled so each rating stays aligned with
    its question number in the log file.
    """
    text = text_dict['instructions']
    file = open('response ' + str(subjectnumber) + '_' + gendergender + '.txt', 'a')  #### Opening the log-file
    file.write('#PP\tsim\tsync\tev\tq1\tq2\tq3\tq4\tq5\tq6\tq7\tq8\tq9\tq10\tq11\tq12\tq13\tq14\tq15\n')
    file.write(str(subjectnumber) + '\t' + str(similarity) + '\t' + str(synchronicity) + '\t' + str(smileornot) + '\t')
    # NOTE(review): header lists q1-q15 but the neutral-face list has 13 items
    if smileornot == 0:
        questions = questions4neutralface
    if smileornot == 1:
        questions = questions4happyface
    for qnumber, question in enumerate(questions):
        #### show each question and append the pressed key (global `out`) to the log
        yield showquestion('Question ' + str(qnumber + 1) + ': ' + question + '\n' + qinstructions, qnumber + 1)
        file.write(out)
    #### line break at the end of the block
    file.write('\n\n')
    file.close()
    text.message('You have finished the questionnaires, \nnow please press space key to go on')
    yield viztask.waitKeyDown(' ')
    text.message('')


def quit():
    """Show the goodbye message and exit Vizard. (Shadows the builtin quit();
    kept because main_sequence calls it by this name.)"""
    text = text_dict['instructions']
    text.message('The experiment is finished, thank you.')
    yield viztask.waitTime(2)
    viz.quit()


def main_sequence():
    """Top-level experiment flow. The pre-measures and practice are currently
    commented out; two illusion blocks run in the counterbalanced order given
    by `synchronicitys`, with the AUT between them."""
#    block = -1
#    yield instructions()
#    yield measureaffectgrid(block)   #### pre-1, baseline emotion
#    yield practice(block)            #### let participants experience sync and async conditions
#    block = 0
#    yield game_illusion(block, 0)    #### static face: compare with their own face
#    yield measureaffectgrid(block)   #### pre0: emotion after practice and static face
#    yield measureios(block)
    block = 1
    subblock = 0
    for synchronicity in synchronicitys:
        yield game_illusion(block, synchronicity)  #### moving face: visuomotor and visuotactile stimulation
        yield measureaffectgrid(block)             #### post1: emotion after exp 1 / exp 2
        yield measureios(block)
#        yield measureME(block)
        yield questionnaire(similarity, synchronicity, smileornot)
        if subblock == 0:
            # only between the first and second block
            yield measureaut()
            yield measureaffectgrid(2)             #### post2: emotion after AUT, before exp 2
        subblock = subblock + 1
    yield quit()


viztask.schedule(main_sequence())

import vizcam
cam = vizcam.PivotNavigate()
viz.MainView.setEuler(0, 0, 0)
cam.setCenter(0, 0, 0)
cam.setDistance(0.7)