How to use the psychopy.visual.GratingStim class in psychopy

To help you get started, we’ve selected a few psychopy.visual.GratingStim examples, based on popular ways it is used in public projects.

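Before the project excerpts below, here is a minimal, self-contained sketch of the typical GratingStim workflow (the window size, spatial frequency and other values are illustrative and not taken from any of the projects): create a Window, create the stimulus, then draw it and flip the window on every frame.

from psychopy import visual, core, event

# Minimal illustrative example: drift a Gabor-like grating until a key is
# pressed or five seconds have passed.
win = visual.Window([800, 600], units='pix')
grating = visual.GratingStim(win, tex='sin', mask='gauss',
    size=256, sf=0.02, ori=45)

clock = core.Clock()
while clock.getTime() < 5.0 and not event.getKeys():
    grating.phase += 0.01  # advance the phase by 1/100 of a cycle each frame
    grating.draw()
    win.flip()

win.close()
core.quit()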

github psychopy / psychopy / psychopy / demos / coder / iohub / delaytest / run.py
        instr = visual.TextStim(win=self.psychoWindow,
                                text='Move the mouse around, press keyboard keys and mouse buttons',
                                pos = [0,-125], height=32, color=[-1,-1,-1],
                                colorSpace='rgb', wrapWidth=800.0)

        self.psychoStim['static'] = visual.BufferImageStim(win=self.psychoWindow,
                                         stim=(fixation, title, instr))
        self.psychoStim['grating'] = visual.PatchStim(self.psychoWindow,
                                        mask="circle", size=75,pos=[-100,0],
                                        sf=.075)
        self.psychoStim['keytext'] = visual.TextStim(win=self.psychoWindow,
                                        text='key', pos = [0,300], height=48,
                                        color=[-1,-1,-1], colorSpace='rgb',
                                        wrapWidth=800.0)
        self.psychoStim['mouseDot'] = visual.GratingStim(win=self.psychoWindow,
                                        tex=None, mask="gauss",
                                        pos=currentPosition,size=(50,50),
                                        color='purple')
        self.psychoStim['progress'] = visual.ShapeStim(win=self.psychoWindow,
                                        vertices=[(0,0),(0,0),(0,0),(0,0)],
                                        pos=(400, -300))
github psychopy / psychopy / psychopy / demos / coder / timing / timeByFrames.py
"""

from __future__ import division
from __future__ import print_function

from builtins import range
from psychopy import visual, logging, core, event
visual.useFBO = True  # if available (try without for comparison)

import matplotlib
matplotlib.use('Qt5Agg')  # change this to control the plotting 'back end'
import pylab

nIntervals = 500
win = visual.Window([1280, 1024], fullscr=True, allowGUI=False, waitBlanking=True)
progBar = visual.GratingStim(win, tex=None, mask=None,
    size=[0, 0.05], color='red', pos=[0, -0.9], autoLog=False)
myStim = visual.GratingStim(win, tex='sin', mask='gauss',
    size=300, sf=0.05, units='pix', autoLog=False)
# logging.console.setLevel(logging.INFO)# uncomment to log every frame

win.recordFrameIntervals = True
for frameN in range(nIntervals + 1):
    progBar.setSize([2.0 * frameN/nIntervals, 0.05])
    progBar.draw()
    myStim.setPhase(0.1, '+')
    myStim.draw()
    if event.getKeys():
        print('stopped early')
        break
    win.logOnFlip(msg='frame=%i' %frameN, level=logging.EXP)
    win.flip()
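# The excerpt ends here; the demo's plotting step is not shown. A hedged
# sketch of how the recorded intervals could be inspected with the pylab
# module imported above (not necessarily the demo's own plotting code):
win.close()
intervalsMS = pylab.array(win.frameIntervals) * 1000
print('mean frame interval: %.2f ms' % intervalsMS.mean())
pylab.hist(intervalsMS, bins=50)
pylab.xlabel('frame interval (ms)')
pylab.ylabel('number of frames')
pylab.show()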
github lindeloev / psychopy-course / ppc_template.py
# Save input variables in "V" dictionary (V for "variables")
V = {'subject':'', 'condition': ['trueFix', 'falseFix'], 'age':'', 'gender':['male', 'female']}
if not gui.DlgFromDict(V, order=['subject', 'age', 'gender']).OK:
    core.quit()

# Stuff
clock = core.Clock()  # A clock which will be used throughout the experiment to time events on a trial-per-trial basis (stimuli and reaction times).
writer = ppc.csvWriter(str(V['subject']), saveFolder=SAVE_FOLDER)  # writer.write(trial) will write individual trials with low latency

# Create psychopy window
my_monitor = monitors.Monitor('testMonitor', width=MON_WIDTH, distance=MON_DISTANCE)  # Create monitor object from the variables above. This is needed to control size of stimuli in degrees.
my_monitor.setSizePix(MON_SIZE)
win = visual.Window(monitor=my_monitor, units='deg', fullscr=True, allowGUI=False, color='black')  # Initiate the psychopy Window as the object "win", using the my_monitor object from the line above. Use degrees as units!

# Stimuli.
stim_gabor = visual.GratingStim(win, mask='gauss', sf=GABOR_SF, size=GABOR_SIZE)  # A gabor patch. Again, units are inherited.
stim_fix = visual.TextStim(win, '+', height=FIX_HEIGHT)  # Fixation cross is just the character "+". Units are inherited from Window when not explicitly specified.
stim_text = visual.TextStim(win, pos=MESSAGE_POS, height=MESSAGE_HEIGHT, wrapWidth=999)  # Message / question stimulus. Will be used to display instructions and questions.
sound_success = sound.Sound('C', secs=0.1, octave=6)  # Note: ppc.Sound() is much more accurate, but only works on Windows.
sound_fail = sound.Sound('C', secs=0.4, octave=4)


"""
 FUNCTIONS
"""

def ask(text='', keyList=None):
    """
    Ask subject something. Shows question and returns answer (keypress)
    and reaction time. Defaults to no text and all keys.
    """
    # Draw the TextStims to the visual buffer, then show it and reset timing immediately (at stimulus onset)
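    # The excerpt is truncated here. A hypothetical sketch of how the helper
    # might continue, reusing stim_text, win and clock from above and assuming
    # `event` is imported from psychopy (illustrative, not the template's code):
    stim_text.text = text
    stim_text.draw()
    win.flip()     # show the question; this is stimulus onset
    clock.reset()  # measure reaction time from onset
    key, rt = event.waitKeys(keyList=keyList, timeStamped=clock)[0]
    return key, rt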
github psychopy / psychopy / psychopy / demos / coder / misc / captureFrames.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
Demo of how to copy pixels from the frame buffer
"""

from __future__ import division

from builtins import range
from psychopy import visual, core, logging
logging.console.setLevel(logging.INFO)

win = visual.Window([200, 200])
myStim = visual.GratingStim(win, pos=[-0.5, -0.5],
    size=1, sf=5, color=[0, 1, 1], ori=30, mask='gauss', autoLog=False)

n = 10
for frameN in range(n):
    myStim.setPhase(0.1, '+')
    myStim.draw()
    # you can either read from the back buffer BEFORE win.flip() or
    # from the front buffer just AFTER the flip. The former has the
    # advantage that it won't be affected by other windows whereas
    # the latter can be.
    win.getMovieFrame(buffer='back')
    win.flip()

# save the movie in the format of your choice
win.saveMovieFrames('frame.png', clearFrames=False)
win.saveMovieFrames('myMovie.gif', clearFrames=False)
github isolver / ioHub / devices / _to_finish / mbed.TBC / mbedTest / run.py
        # get the index of the screen to create the PsychoPy window in.
        screen_index=display.getStimulusScreenIndex()

        # Create a psychopy window, full screen resolution, full screen mode, pix units, with no border, using the monitor
        # profile name 'testMonitor', which is created on the fly right now by the script
        psychoWindow = visual.Window(screen_resolution, monitor="testMonitor", units=coord_type, fullscr=True, allowGUI=False,screen=screen_index)

        # Hide the 'system mouse cursor' so we can display a cool gaussian mask for a mouse cursor.
        mouse.setSystemCursorVisibility(False)

        # Create an ordered dictionary of psychopy stimuli. An ordered dictionary is one that returns keys in the order
        # they are added, so you can use it to reference stim by a name or by 'zorder'
        psychoStim=OrderedDict()
        psychoStim['grating'] = visual.PatchStim(psychoWindow, mask="circle", size=75,pos=[-100,0], sf=.075)
        psychoStim['fixation'] =visual.PatchStim(psychoWindow, size=25, pos=[0,0], sf=0,  color=[-1,-1,-1], colorSpace='rgb')
        psychoStim['mouseDot'] =visual.GratingStim(psychoWindow,tex=None, mask="gauss", pos=currentPosition,size=(50,50),color='purple')
        psychoStim['mbedReply'] = visual.TextStim(win=psychoWindow, text='', pos = [0,300], height=48, color=[-1,-1,-1], colorSpace='rgb',alignHoriz='left',wrapWidth=800.0)

        # Clear all events from the global event buffer, and from the keyboard event buffer.
        self.hub.clearEvents()
        self.hub.clearEvents('kb')

        QUIT_EXP=False
        # Loop until the space, Enter (Return), or Escape key is pressed.
        while QUIT_EXP is False:

            # for each loop, update the grating phase
            psychoStim['grating'].setPhase(0.05, '+')#advance phase by 0.05 of a cycle

            # and update the mouse contingent gaussian based on the current mouse location
            currentPosition=mouse.getPosition()
            psychoStim['mouseDot'].setPos(currentPosition)
github esdalmaijer / PyGaze / pygaze / libscreen.py
		if colour == None:
			colour = self.fgc
		if x == None:
			x = self.dispsize[0]/2
		if y == None:
			y = self.dispsize[1]/2

		pos = x,y
		colour = rgb2psychorgb(colour)
		pos = pos2psychopos(pos,dispsize=self.dispsize)
		pos = pos[0] + w/2, pos[1] - h/2

		self.screen.append(GratingStim(expdisplay, tex=None, mask=None, pos=pos, size=[w,h], color=colour))
		if not fill:
			self.screen.append(GratingStim(expdisplay, tex=None, mask=None, pos=pos, size=[w-2,h-2], color=rgb2psychorgb(self.bgc)))
github psychopy / psychopy / psychopy / demos / coder / iohub / eyetracking / eyetribeexample.py
#print 'Display Default Eye Distance: ', display.getDefaultEyeDistance()        
#print 'Display Physical Dimensions: ', display.getPhysicalDimensions()        
#print 'Display Resolution: ', display.getPixelResolution()

res=display.getPixelResolution() # Current pixel resolution of the Display to be used
coord_type=display.getCoordinateType()
window=visual.Window(res,monitor=display.getPsychopyMonitorName(), # name of the PsychoPy Monitor Config file if used.
                            units=coord_type, # coordinate space to use.
                            fullscr=True, # We need full screen mode.
                            allowGUI=False, # We want it to be borderless
                            screen= display.getIndex() # The display index to use, assuming a multi display setup.
                            )

# Create a circle to use for the Gaze Cursor. Current units assume pix.
#
gaze_dot = visual.GratingStim(window, tex=None, mask="gauss",
                              pos=(0, 0), size=(66, 66), color='green',
                              units=coord_type)


io.clearEvents("all")   
tracker.enableEventReporting(True)
     
while not kb.getEvents():   
    # Get the latest gaze position in display coord space..
    #
    gpos=tracker.getPosition()
    if type(gpos) in [tuple,list]:
        # If we have a gaze position from the tracker,
        # redraw the background image and then the
        # gaze_cursor at the current eye position.
        #
github psychopy / psychopy / psychopy / demos / coder / experiment control / autoDraw_autoLog.py
One easy way to handle stimuli that are drawn repeatedly is to
setAutoDraw(True) for that stimulus. It will continue to be drawn until
stim.setAutoDraw(False) is called. By default a logging message of
level EXP will be created when setAutoDraw is called.

This can be turned off for each call with stim.setAutoDraw(True, autoLog=False)
"""

from __future__ import division

from psychopy import visual, core

win = visual.Window([800, 800])

# a stim's name is used in log entries
stim1 = visual.GratingStim(win, pos=[-0.5, -0.5], name='stim1')
stim2 = visual.TextStim(win, pos=[0.5, 0.5], text='stim2', name='textStim')

# no need to log the fixation point info, use autoLog=False
fixation = visual.GratingStim(win, mask='gauss', tex=None, size=0.02,
    name='fixation', autoLog=False)

fixation.setAutoDraw(True)
stim1.setAutoDraw(True)
stim2.setAutoDraw(True)
# both on
for frameN in range(20):  # run 20 frames like this
    win.flip()

stim2.setAutoDraw(False)
# will draw only stim1 (and fixation)
for frameN in range(20):  # run 20 frames like this
github psychopy / psychopy / psychopy / demos / coder / iohub_extended / eyetrackerExamples / sequentialFixationTask / experimentResources.py
units='pix',pos=(0,0))
        self.stimNames.append('OUTER_POINT')
        self.stim['OUTER_POINT'].setFillColor(self.TARGET_OUTER_COLOR,'rgb255')

        self.stim['INNER_POINT']=visual.Circle(self.window(),
                        radius=(self.TARGET_INNER_RADIUS,self.TARGET_INNER_RADIUS),
                        lineWidth=0, lineColor=None, lineColorSpace='rgb255', 
                        name='FP_INNER', opacity=1.0, interpolate=False, 
                        units='pix', pos=(0,0))
        self.stimNames.append('INNER_POINT')        
        self.stim['INNER_POINT'].setFillColor(self.TARGET_INNER_COLOR,'rgb255')

        self._showDynamicStim=False
        self.dynamicStimPositionFuncPtr=None
        ppd=experimentRuntime.devices.display.getPixelsPerDegree()
        self.stim['DYNAMIC_STIM']=visual.GratingStim(self.window(),tex=None, 
                        mask="gauss", pos=[0,0],size=ppd,color='purple',opacity=0.0)
        self.stimNames.append('DYNAMIC_STIM')
        
        self.nextAreaOfInterest=None
        self.aoiTriggeredTime=None
        self.aoiTriggeredID=None
github psychopy / psychopy / psychopy / demos / coder / stimuli / imagesAndPatches.py
from __future__ import division
from __future__ import print_function

from psychopy import core, visual, event

# Create a window to draw in
win = visual.Window((800, 800), monitor='testMonitor', allowGUI=False, color='black')

# Initialize some stimuli
beach = visual.ImageStim(win, image='beach.jpg', flipHoriz=True, pos=(0, 4.50), units='deg')
faceRGB = visual.ImageStim(win, image='face.jpg', mask=None,
    pos=(50, -50), size=None,  # will be the size of the original image in pixels
    units='pix', interpolate=True, autoLog=False)
print("original image size:", faceRGB.size)
faceALPHA = visual.GratingStim(win, pos=(-0.7, -0.2),
    tex="sin", mask="face.jpg", color=[1.0, 1.0, -1.0],
    size=(0.5, 0.5), units="norm", autoLog=False)
message = visual.TextStim(win, pos=(-0.95, -0.95),
    text='[Esc] to quit', color='white',
    anchorVert='bottom', anchorHoriz='left')

trialClock = core.Clock()
t = lastFPSupdate = 0
win.recordFrameIntervals = True
while not event.getKeys():
    t = trialClock.getTime()
    # Images can be manipulated on the fly
    faceRGB.ori += 1  # advance ori by 1 degree
    faceRGB.draw()
    faceALPHA.phase += 0.01  # advance phase by 1/100th of a cycle
    faceALPHA.draw()