Autograder [Sat Apr 18 04:22:05 2020]: Received job 11785-s20_hw4p1_1_kbasulai@andrew.cmu.edu:91
Autograder [Sat Apr 18 04:22:22 2020]: Success: Autodriver returned normally
Autograder [Sat Apr 18 04:22:22 2020]: Here is the output from the autograder:
---
Autodriver: Job exited with status 0
mkdir -p handin
tar xf handin.tar -C handin
tar xf autograde.tar
AUTOLAB=1 /usr/local/depot/anaconda3/bin/python3 autograde/runner.py --module-path=./handin/
FF
=================================== FAILURES ===================================
_______________________________ test_generation ________________________________

    def test_generation():
        inp = np.load(fixture_path('generation.npy'))
        forward = 10
        n = inp.shape[0]
        t = inp.shape[1]
>       pred = np.load(handin_path('generated_logits.npy'))

autograde/tests/test_generation.py:222: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

file = '/home/autograde/autolab/autograde/../handin/generated_logits.npy'
mmap_mode = None, allow_pickle = True, fix_imports = True, encoding = 'ASCII'

    def load(file, mmap_mode=None, allow_pickle=True, fix_imports=True,
             encoding='ASCII'):
        """
        Load arrays or pickled objects from ``.npy``, ``.npz`` or pickled files.
    
        Parameters
        ----------
        file : file-like object or string
            The file to read. File-like objects must support the
            ``seek()`` and ``read()`` methods. Pickled files require that the
            file-like object support the ``readline()`` method as well.
        mmap_mode : {None, 'r+', 'r', 'w+', 'c'}, optional
            If not None, then memory-map the file, using the given mode (see
            `numpy.memmap` for a detailed description of the modes).  A
            memory-mapped array is kept on disk. However, it can be accessed
            and sliced like any ndarray.  Memory mapping is especially useful
            for accessing small fragments of large files without reading the
            entire file into memory.
        allow_pickle : bool, optional
            Allow loading pickled object arrays stored in npy files. Reasons for
            disallowing pickles include security, as loading pickled data can
            execute arbitrary code. If pickles are disallowed, loading object
            arrays will fail.
            Default: True
        fix_imports : bool, optional
            Only useful when loading Python 2 generated pickled files on Python 3,
            which includes npy/npz files containing object arrays. If `fix_imports`
            is True, pickle will try to map the old Python 2 names to the new names
            used in Python 3.
        encoding : str, optional
            What encoding to use when reading Python 2 strings. Only useful when
            loading Python 2 generated pickled files on Python 3, which includes
            npy/npz files containing object arrays. Values other than 'latin1',
            'ASCII', and 'bytes' are not allowed, as they can corrupt numerical
            data. Default: 'ASCII'
    
        Returns
        -------
        result : array, tuple, dict, etc.
            Data stored in the file. For ``.npz`` files, the returned instance
            of NpzFile class must be closed to avoid leaking file descriptors.
    
        Raises
        ------
        IOError
            If the input file does not exist or cannot be read.
        ValueError
            The file contains an object array, but allow_pickle=False given.
    
        See Also
        --------
        save, savez, savez_compressed, loadtxt
        memmap : Create a memory-map to an array stored in a file on disk.
    
        Notes
        -----
        - If the file contains pickle data, then whatever object is stored
          in the pickle is returned.
        - If the file is a ``.npy`` file, then a single array is returned.
        - If the file is a ``.npz`` file, then a dictionary-like object is
          returned, containing ``{filename: array}`` key-value pairs, one for
          each file in the archive.
        - If the file is a ``.npz`` file, the returned value supports the
          context manager protocol in a similar fashion to the open function::
    
            with load('foo.npz') as data:
                a = data['a']
    
          The underlying file descriptor is closed when exiting the 'with'
          block.
    
        Examples
        --------
        Store data to disk, and load it again:
    
        >>> np.save('/tmp/123', np.array([[1, 2, 3], [4, 5, 6]]))
        >>> np.load('/tmp/123.npy')
        array([[1, 2, 3],
               [4, 5, 6]])
    
        Store compressed data to disk, and load it again:
    
        >>> a=np.array([[1, 2, 3], [4, 5, 6]])
        >>> b=np.array([1, 2])
        >>> np.savez('/tmp/123.npz', a=a, b=b)
        >>> data = np.load('/tmp/123.npz')
        >>> data['a']
        array([[1, 2, 3],
               [4, 5, 6]])
        >>> data['b']
        array([1, 2])
        >>> data.close()
    
        Mem-map the stored array, and then access the second row
        directly from disk:
    
        >>> X = np.load('/tmp/123.npy', mmap_mode='r')
        >>> X[1, :]
        memmap([4, 5, 6])
    
        """
        import gzip
    
        own_fid = False
        if isinstance(file, basestring):
>           fid = open(file, "rb")
E           FileNotFoundError: [Errno 2] No such file or directory: '/home/autograde/autolab/autograde/../handin/generated_logits.npy'

/usr/local/depot/anaconda3/lib/python3.5/site-packages/numpy/lib/npyio.py:362: FileNotFoundError
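The failure above is not a wrong answer: test_generation simply could not find generated_logits.npy inside the unpacked handin directory, so it aborts before comparing any values. A minimal sketch of producing that file before packaging the handin is shown below; the array contents and shape are placeholders for the actual model output, not something the autograder prescribes.

    import numpy as np

    # Placeholder standing in for the logits produced by the trained language
    # model during generation; the real array comes from the student's code.
    generated_logits = np.zeros((128, 10), dtype=np.float32)  # shape illustrative only

    # test_generation loads exactly this filename from the handin root:
    #     np.load(handin_path('generated_logits.npy'))
    np.save('generated_logits.npy', generated_logits)
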
_______________________________ test_prediction ________________________________

    def test_prediction():
        fixture = np.load(fixture_path('prediction.npz'))
        inp = fixture['inp']
        targ = fixture['out']
>       out = np.load(handin_path('predictions.npy'))

autograde/tests/test_prediction.py:23: 
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ 

file = '/home/autograde/autolab/autograde/../handin/predictions.npy'
mmap_mode = None, allow_pickle = True, fix_imports = True, encoding = 'ASCII'

    def load(file, mmap_mode=None, allow_pickle=True, fix_imports=True,
             encoding='ASCII'):
        """
        Load arrays or pickled objects from ``.npy``, ``.npz`` or pickled files.
    
        Parameters
        ----------
        file : file-like object or string
            The file to read. File-like objects must support the
            ``seek()`` and ``read()`` methods. Pickled files require that the
            file-like object support the ``readline()`` method as well.
        mmap_mode : {None, 'r+', 'r', 'w+', 'c'}, optional
            If not None, then memory-map the file, using the given mode (see
            `numpy.memmap` for a detailed description of the modes).  A
            memory-mapped array is kept on disk. However, it can be accessed
            and sliced like any ndarray.  Memory mapping is especially useful
            for accessing small fragments of large files without reading the
            entire file into memory.
        allow_pickle : bool, optional
            Allow loading pickled object arrays stored in npy files. Reasons for
            disallowing pickles include security, as loading pickled data can
            execute arbitrary code. If pickles are disallowed, loading object
            arrays will fail.
            Default: True
        fix_imports : bool, optional
            Only useful when loading Python 2 generated pickled files on Python 3,
            which includes npy/npz files containing object arrays. If `fix_imports`
            is True, pickle will try to map the old Python 2 names to the new names
            used in Python 3.
        encoding : str, optional
            What encoding to use when reading Python 2 strings. Only useful when
            loading Python 2 generated pickled files on Python 3, which includes
            npy/npz files containing object arrays. Values other than 'latin1',
            'ASCII', and 'bytes' are not allowed, as they can corrupt numerical
            data. Default: 'ASCII'
    
        Returns
        -------
        result : array, tuple, dict, etc.
            Data stored in the file. For ``.npz`` files, the returned instance
            of NpzFile class must be closed to avoid leaking file descriptors.
    
        Raises
        ------
        IOError
            If the input file does not exist or cannot be read.
        ValueError
            The file contains an object array, but allow_pickle=False given.
    
        See Also
        --------
        save, savez, savez_compressed, loadtxt
        memmap : Create a memory-map to an array stored in a file on disk.
    
        Notes
        -----
        - If the file contains pickle data, then whatever object is stored
          in the pickle is returned.
        - If the file is a ``.npy`` file, then a single array is returned.
        - If the file is a ``.npz`` file, then a dictionary-like object is
          returned, containing ``{filename: array}`` key-value pairs, one for
          each file in the archive.
        - If the file is a ``.npz`` file, the returned value supports the
          context manager protocol in a similar fashion to the open function::
    
            with load('foo.npz') as data:
                a = data['a']
    
          The underlying file descriptor is closed when exiting the 'with'
          block.
    
        Examples
        --------
        Store data to disk, and load it again:
    
        >>> np.save('/tmp/123', np.array([[1, 2, 3], [4, 5, 6]]))
        >>> np.load('/tmp/123.npy')
        array([[1, 2, 3],
               [4, 5, 6]])
    
        Store compressed data to disk, and load it again:
    
        >>> a=np.array([[1, 2, 3], [4, 5, 6]])
        >>> b=np.array([1, 2])
        >>> np.savez('/tmp/123.npz', a=a, b=b)
        >>> data = np.load('/tmp/123.npz')
        >>> data['a']
        array([[1, 2, 3],
               [4, 5, 6]])
        >>> data['b']
        array([1, 2])
        >>> data.close()
    
        Mem-map the stored array, and then access the second row
        directly from disk:
    
        >>> X = np.load('/tmp/123.npy', mmap_mode='r')
        >>> X[1, :]
        memmap([4, 5, 6])
    
        """
        import gzip
    
        own_fid = False
        if isinstance(file, basestring):
>           fid = open(file, "rb")
E           FileNotFoundError: [Errno 2] No such file or directory: '/home/autograde/autolab/autograde/../handin/predictions.npy'

/usr/local/depot/anaconda3/lib/python3.5/site-packages/numpy/lib/npyio.py:362: FileNotFoundError
Run time:  1.08363938331604
{"scores": {"Generation": 0.0, "Prediction": 0.0}}