@alvinwan
Last active May 27, 2017 02:04
Gradescope Autograding for CS189
"""
Check differences between files containing integers.
This script checks the diff between two files and writes to a JSON compatible
with Gradescope's autograder.
Usage:
autograder.py <ref> <subm> [options]
Options:
--out=<out> Path to write results as JSON [default: results.json]
--threshold=<bar> Accuracy needed for full score [default: 0.8]
--points=<pts> Total points for assignment [default: 100]
"""
import docopt
def grade(ref_path: str, subm_path: str) -> float:
"""Grade submission"""
with open(ref_path) as f:
ref = list(f)
with open(subm_path) as f:
subm = list(f)
ref_len, subm_len = float(len(ref)), len(subm)
if len(ref) != len(subm):
raise ValueError('Submission too short (%d instead of %d lines)' %
(subm_len, ref_len))
accuracy = sum(int(a) == int(b) for a, b in zip(ref, subm)) / ref_len
return accuracy
def write_accuracy(
accuracy: float, path: str, threshold: float=0.8, points: int=100):
with open(path, 'w') as f:
score = points if accuracy > threshold else 0.0
f.write('{"score": %f, "output": "Accuracy: %f"}' % (score, accuracy))
def write_error(error, path: str):
with open(path, 'w') as f:
f.write('{"score": -1, "output": "%s"}' % error.args[0])
def main():
"""Main runnable"""
arguments = docopt.docopt(__doc__)
ref = arguments['<ref>']
subm = arguments['<subm>']
out = arguments['--out']
threshold = float(arguments['--threshold'])
points = float(arguments['--points'])
try:
accuracy = grade(ref, subm)
write_accuracy(accuracy, out, threshold, points)
except UserWarning as error:
write_error(error, out)
except ValueError as error:
write_error(error, out)
if __name__ == '__main__':
main()
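
A quick local sanity check of the script above, assuming it is saved as autograder.py next to two small label files with one integer per line (like the ten-line label file that follows); the file names ref.txt and subm.txt here are hypothetical stand-ins.

from autograder import grade, write_accuracy

# If, say, nine of ten labels match, accuracy is 0.9; since 0.9 > 0.8 (the default
# threshold), write_accuracy awards full points and results.json would contain:
# {"score": 100.000000, "output": "Accuracy: 0.900000"}
accuracy = grade('ref.txt', 'subm.txt')
write_accuracy(accuracy, 'results.json')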
0
1
1
0
1
0
0
1
1
1
docopt==0.6.2
numpy==1.12.1
#!/bin/bash
python3 /autograder/source/autograder.py \
    /autograder/source/reference.txt \
    /autograder/submission/submission.txt \
    --out=/autograder/results/results.json
#!/bin/bash
apt-get install -y python3-pip
pip3 install -r /autograder/source/requirements.txt
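
To deploy, Gradescope expects a zip with setup.sh and run_autograder at its root. Below is a minimal packaging sketch, assuming the files above are saved under the names used in the run script (autograder.py, reference.txt, requirements.txt) and that the two shell scripts are named setup.sh and run_autograder; those two names are assumptions, not shown in the gist itself.

import zipfile

# Bundle everything into autograder.zip for upload in the Gradescope assignment's
# autograder configuration. File names are assumptions based on the run script
# above and Gradescope's setup.sh / run_autograder layout.
with zipfile.ZipFile('autograder.zip', 'w') as zf:
    for name in ('setup.sh', 'run_autograder', 'autograder.py',
                 'reference.txt', 'requirements.txt'):
        zf.write(name)

Gradescope runs setup.sh once when building the autograder image and run_autograder once per student submission.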