From 8226b1756095af575a212a394b8e8c5e6a5f536b Mon Sep 17 00:00:00 2001 From: olima957 <olima957@student.liu.se> Date: Thu, 3 Oct 2024 14:31:48 +0200 Subject: [PATCH] Laddade upp nya datalager filer. --- MyPortfolio/data.json | 118 ++++++++++++------ MyPortfolio/data.py | 259 +++++++++++++++++++++++++++++++++++++++ MyPortfolio/data_test.py | 248 +++++++++++++++++++++++++++++++++++++ 3 files changed, 588 insertions(+), 37 deletions(-) create mode 100644 MyPortfolio/data.py create mode 100644 MyPortfolio/data_test.py diff --git a/MyPortfolio/data.json b/MyPortfolio/data.json index 11ef2f0..9956065 100644 --- a/MyPortfolio/data.json +++ b/MyPortfolio/data.json @@ -1,37 +1,81 @@ -{ - "projects": [ - { - "title": "project_example", - "project_id": 0, - "techniques": "techniques", - "desc": "description", - "img_url": "img_url", - "url": "url" - }, - { - "title": "MyProject", - "project_id": 1, - "techniques": [ - "python", - "css", - "html", - "flask" - ], - "desc": "It's great!", - "img_url": "logo.jpg", - "url": "https://www.github.com/" - }, - { - "title": "MySecondProject", - "project_id": 2, - "techniques": [ - "python", - "html", - "css" - ], - "desc": "This is a python project!", - "img_url": "logo.png", - "url": "https://www.github.com" - } - ] -} +[ + { + "start_date": "2009-09-05", + "short_description": "no", + "course_name": "OK\u00c4NT", + "long_description": "no no no", + "group_size": 2, + "academic_credits": "WUT?", + "lulz_had": "many", + "external_link": "YY", + "small_image": "X", + "techniques_used": [ + "python" + ], + "project_name": "python data-module test script", + "course_id": "TDP003", + "end_date": "2009-09-06", + "project_id": 1, + "big_image": "XXX" + }, + { + "start_date": "2009-09-07", + "short_description": "no", + "course_name": "OK\u00c4NT", + "long_description": "no no no", + "group_size": 4, + "academic_credits": "WUT?", + "lulz_had": "few", + "external_link": "YY", + "small_image": "X", + "techniques_used": [ + "c++", + "csv", + "python" + ], + "project_name": "NEJ", + "course_id": "TDP003", + "end_date": "2009-09-08", + "project_id": 3, + "big_image": "XXX" + }, + { + "start_date": "2009-09-08", + "short_description": "no", + "course_name": "OK\u00c4NT", + "long_description": "no no no", + "group_size": 6, + "academic_credits": "WUT?", + "lulz_had": "medium", + "external_link": "YY", + "small_image": "X", + "techniques_used": [ + "ada", + "python" + ], + "project_name": "2007", + "course_id": "TDP003", + "end_date": "2009-09-09", + "project_id": 2, + "big_image": "XXX" + }, + { + "start_date": "2009-09-06", + "short_description": "no", + "course_name": "HOHO", + "long_description": "no no no", + "group_size": 8, + "academic_credits": "WUT?", + "lulz_had": "over 9000", + "external_link": "YY", + "small_image": "X", + "techniques_used": [ + + ], + "project_name": ",", + "course_id": " \"", + "end_date": "2009-09-07", + "project_id": 4, + "big_image": "XXX" + } +] diff --git a/MyPortfolio/data.py b/MyPortfolio/data.py new file mode 100644 index 0000000..c386d5e --- /dev/null +++ b/MyPortfolio/data.py @@ -0,0 +1,259 @@ +#!/.venv/bin/python + +# TODO + +# Gör project_id dynamisk så att den uppdateras efter borttagning/addering av projekt. 
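+
+# A possible helper for the TODO above (a sketch): derive the next free id from
+# the data itself, so ids stay unique after projects are added or removed.
+# new_project() further down uses it.
+def next_project_id(data):
+    """Return the next free project_id: highest existing id + 1, or 1 for an empty list."""
+    return max((project['project_id'] for project in data), default=0) + 1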
+
+
+# ---- IMPORTS ---- #
+import os
+import json
+import pprint
+from operator import itemgetter
+# ---- IMPORTS ---- #
+
+
+# Load the project database from a JSON file.
+# Returns None if the file cannot be read or parsed (data_test.py relies on this).
+def load(filename):
+    try:
+        with open(filename, 'r', encoding='utf-8') as file:
+            return json.load(file)
+    except (OSError, json.JSONDecodeError):
+        return None
+
+
+# Write the project database back to disk.
+def save(data, filename='data.json'):
+    with open(filename, 'w', encoding='utf-8') as file:
+        json.dump(data, file, ensure_ascii=False, indent=4)
+
+
+# Get a project by its project_id. Returns None if no project matches.
+def get_project(data, id):
+    for project in data:
+        if project['project_id'] == id:
+            return project
+    return None
+
+
+# Get project count
+def get_project_count(data):
+    return len(data)
+
+
+# Get all unique techniques used across the projects, sorted alphabetically.
+def get_techniques(data):
+    techniques = []
+    for project in data:
+        for tech in project['techniques_used']:
+            if tech not in techniques:
+                techniques.append(tech)
+
+    techniques.sort()
+    return techniques
+
+
+# Map every technique to the projects that use it. Each value is a list of
+# {'id': ..., 'name': ...} dicts, one entry per project using that technique.
+def get_technique_stats(data):
+    technique_stats = {}
+
+    for project in data:
+        for technique in project['techniques_used']:
+            entry = {'id': project['project_id'], 'name': project['project_name']}
+            technique_stats.setdefault(technique, []).append(entry)
+
+    return technique_stats
+
+
+# Fetch and sort the projects matching the given criteria. The defaults
+# (sort by start_date, descending order) match what data_test.py expects.
+def search(data, sort_by='start_date', sort_order='desc',
+           techniques=None, search=None, search_fields=None):
+    results = []
+
+    for project in data:
+        # Technique filter: the project must use every requested technique.
+        if techniques and not all(t in project['techniques_used'] for t in techniques):
+            continue
+
+        # Free-text filter: case-insensitive match against the chosen fields
+        # (all fields when search_fields is None, no match when it is empty).
+        if search is not None:
+            fields = project.keys() if search_fields is None else search_fields
+            if not any(search.lower() in str(project[field]).lower() for field in fields):
+                continue
+
+        results.append(project)
+
+    # Sort the matches; descending order is the default.
+    results.sort(key=itemgetter(sort_by), reverse=(sort_order == 'desc'))
+    return results
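+
+# Illustrative use of the query functions above (a sketch; the expected values
+# assume the data.json shipped in this commit and mirror the assertions in
+# data_test.py):
+#
+#     >>> db = load('data.json')
+#     >>> get_project_count(db)
+#     4
+#     >>> get_techniques(db)
+#     ['ada', 'c++', 'csv', 'python']
+#     >>> [p['project_id'] for p in search(db, sort_by='group_size')]
+#     [4, 2, 3, 1]
+#     >>> len(search(db, techniques=['csv']))
+#     1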
+
+
+def cls():
+    # Clear the terminal ('cls' on Windows, 'clear' everywhere else).
+    os.system('cls' if os.name == 'nt' else 'clear')
+
+
+def new_project(data):
+
+    cls()
+
+    # ---- COLLECT INFO ----
+    project_title = input("Project title: ")
+    project_id = next_project_id(data)  # derived from the data, see the TODO at the top
+    techniques = input("\nWhat techniques does your project use? "
+                       "Write them out in the following format: python, java, html, css"
+                       "\n\nTechniques: ").replace(" ", "").lower().split(",")
+    description = input("Provide a description of your project: ")
+    url = input("Provide a link to the source code/demo of your project: ")
+    img_url = input("Image source (ex: logo.jpg): ")
+    # ---- COLLECT INFO ----
+
+    # Lexicographical order sort, aka alphabetical
+    techniques.sort()
+
+    # Field names follow the schema used in data.json.
+    project = {
+        "project_name": project_title,
+        "project_id": project_id,
+        "techniques_used": techniques,
+        "long_description": description,
+        "small_image": img_url,
+        "external_link": url
+    }
+
+    cls()
+
+    print("\n\nProject preview:\n")
+    pprint.pp(project)
+
+    option = int(input("\n1: Create\n2: Cancel\n> "))
+
+    if option == 1:
+        data.append(project)
+        save(data)
+
+
+def list_projects(data):
+    cls()
+    for project in data:
+        pprint.pp(project)
+        print("\n")
+
+
+def edit_project(data, id):
+
+    # Re-prompt until an existing project id is given.
+    project = get_project(data, id)
+    while project is None:
+        print("Project ID doesn't exist.\n")
+        id = int(input("Project_ID to edit: "))
+        project = get_project(data, id)
+
+    cls()
+    print(f"Editing project: {project['project_name']}\n")
+
+    # List the editable fields (project_id is left out so it cannot be changed).
+    fields = [field for field in project if field != 'project_id']
+    for number, field in enumerate(fields):
+        print(f"{number}: {field} = {project[field]}")
+
+    choice = int(input("\nField to edit: "))
+    project[fields[choice]] = input(f"New value for {fields[choice]}: ")
+
+    save(data)
+
+
+def delete_project(data, id):
+    # Remove the project with the given id, if it exists.
+    project = get_project(data, id)
+    if project is not None:
+        data.remove(project)
+        save(data)
+
+
+def menu(data):
+
+    menu_items = ["Add new project", "List projects", "Edit existing project", "Delete project", "Quit"]
+
+    while True:
+
+        cls()
+
+        titular = r"""
+         ____                   _      __           _       _
+        |  _ \    ___    _ __  | |_   / _|   ___   | |     (_)   ___
+        | |_) |  / _ \  | '__| | __| | |_   / _ \  | |     | |  / _ \
+        |  __/  | (_) | | |    | |_  |  _| | (_) | | |  _  | | | (_) |
+        |_|      \___/  |_|     \__| |_|    \___/  |_| (_) |_|  \___/
+        """
+
+        print(titular)
+
+        for number, item in enumerate(menu_items, start=1):
+            print(f"{number}: {item}")
+
+        try:
+            option = int(input("> "))
+
+            if option == 1:
+                new_project(data)
+            elif option == 2:
+                list_projects(data)
+                input()
+            elif option == 3:
+                list_projects(data)
+                edit_project(data, int(input("Project_ID to edit: ")))
+            elif option == 4:
+                delete_project(data, int(input("Project_ID to delete: ")))
+            elif option == 5:
+                cls()
+                break
+        except ValueError:
+            # Non-numeric input: just show the menu again.
+            continue
+
+
+def main():
+
+    data = load('data.json')
+    if data is None:
+        print("Could not read data.json")
+        return
+
+    menu(data)
+
+    pprint.pp(get_technique_stats(data))
+    # search(data, 'project_id', 'desc')
+
+
+if __name__ == "__main__":
+    main()
diff --git a/MyPortfolio/data_test.py b/MyPortfolio/data_test.py
new file mode 100644
index 0000000..93705f3
--- /dev/null
+++ b/MyPortfolio/data_test.py
@@ -0,0 +1,248 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+# Copyright (C) 2011, IDA, Linköping University
+# Copyright (C) 2011, Torbjörn Lönnemark <tobbez@ryara.net>
+# Copyright (C) 2014, Daniel Persson
+import unittest
+import data # import the file with your implemented functions
+import hashlib
+import sys
+from operator import itemgetter
+
+# ----------- HELPER FUNCTIONS -----------
+
+def print_tech_dict(d):
+    for k,v in d.items():
+        print("{}: {}".format(k,v))
+        for e in v:
+            print(e)
+        print()
+
+def sort_dict(d,sort_key):
+    for k in d.keys():
+        d[k] = sorted(d[k], key = itemgetter(sort_key))
+    return d
+
+md5 = hashlib.md5
+
+
+# ----------- TEST CLASS -----------
+
+class DataTest(unittest.TestCase):
+    """ Subclass of the unittest.TestCase class
+
+    Define all tests as a method of this class. Each test must start with the
+    word test (ex test_load). Within each test method, various assertions
+    can be made, e.g.
checking that what you are testing gives the expected + result. + + Use the method self.assertEqual() to compare an expected and observed result. + + Please refer to the unittest documentation for more information: + https://docs.python.org/3.7/library/unittest.html + + To run the tests, have the files data_test.py, data.py and data.json in the + same catalog. data.py is the file with your implemented API functions. + Execute with: + + $ python3 data_test.py + + The test result is shown in the terminal. + + """ + + def setUp(self): + """ The setUp() method is called before every test_*-method. Use it to + prepare things that must always be done before a test is run, such as + loading the data. + """ + + # The content in self.expected_data must match the content in data.json + # for the testing to work. Do NOT change the content or the file! + self.expected_data = [{'big_image': 'XXX', + 'project_name': 'python data-module test script', + 'course_name': 'OK\xc4NT', + 'group_size': 2, 'end_date': '2009-09-06', + 'techniques_used': ['python'], + 'academic_credits': 'WUT?', + 'small_image': 'X', + 'long_description': 'no no no', + 'course_id': 'TDP003', + 'project_id': 1, + 'external_link': 'YY', + 'short_description': 'no', + 'start_date': '2009-09-05', + 'lulz_had': 'many'}, + {'big_image': 'XXX', + 'project_name': 'NEJ', + 'course_name': 'OK\xc4NT', + 'group_size': 4, + 'end_date': '2009-09-08', + 'techniques_used': ['c++', 'csv', 'python'], + 'academic_credits': 'WUT?', + 'small_image': 'X', + 'long_description': 'no no no', + 'course_id': 'TDP003', + 'project_id': 3, + 'external_link': 'YY', + 'short_description': 'no', + 'start_date': '2009-09-07', + 'lulz_had': 'few'}, + {'big_image': 'XXX', + 'project_name': '2007', + 'course_name': 'OK\xc4NT', + 'group_size': 6, + 'end_date': '2009-09-09', + 'techniques_used': ['ada', 'python'], + 'academic_credits': 'WUT?', + 'small_image': 'X', + 'long_description': 'no no no', + 'course_id': 'TDP003', + 'project_id': 2, + 'external_link': 'YY', + 'short_description': 'no', + 'start_date': '2009-09-08', + 'lulz_had': 'medium'}, + {'big_image': 'XXX', + 'project_name': ',', + 'course_name': 'HOHO', + 'group_size': 8, + 'end_date': '2009-09-07', + 'techniques_used': [], + 'academic_credits': 'WUT?', + 'small_image': 'X', + 'long_description': 'no no no', + 'course_id': ' "', + 'project_id': 4, + 'external_link': 'YY', + 'short_description': 'no', + 'start_date': '2009-09-06', + 'lulz_had': 'over 9000'} + ] + + # Sort the expected data by project id + self.expected_data = sorted(self.expected_data, key=itemgetter('project_id')) + + # Store the hardcoded expected results. + # Do NOT change this part + self.expected_technique_data = ['ada', 'c++', 'csv', 'python'] + self.expected_technique_stat_data = {'python': [{'id': 2, 'name': '2007'}, + {'id': 3, 'name': 'NEJ'}, + {'id': 1, 'name': 'python data-module test script'}], + 'csv': [{'id': 3, 'name': 'NEJ'}], + 'c++': [{'id': 3, 'name': 'NEJ'}], + 'ada': [{'id': 2, 'name': '2007'}]} + + # Load the data using your implemented load function. 
The data is + # stored as a member of the class instance, so that it can be accessed + # in other methods of the class + self.loaded_data = sorted(data.load("data.json"), key=itemgetter('project_id')) + + def test_load(self): + """ Test the implemented load function """ + + # Compare the loaded data with the expected data + self.assertEqual(self.loaded_data[0], self.expected_data[0]) + + # Test that loading a non-existing file returns None + self.assertEqual(data.load("/dev/this_file_does_not_exist"), None) + + def test_get_project_count(self): + """ Test the implemented function get_project_count """ + + # Test that the correct number of projects are returned + self.assertEqual(data.get_project_count(self.loaded_data), 4) + + def test_get_project(self): + """ Test the implemented function get_project """ + + # Try to get project 1, 2, 3 and 4 and check that a project with + # the correct project_id is returned. + self.assertEqual(data.get_project(self.loaded_data, 1)['project_id'], 1) + self.assertEqual(data.get_project(self.loaded_data, 2)['project_id'], 2) + self.assertEqual(data.get_project(self.loaded_data, 3)['project_id'], 3) + self.assertEqual(data.get_project(self.loaded_data, 4)['project_id'], 4) + + # Try to get a non-existing project and check that None is returned + self.assertEqual(data.get_project(self.loaded_data, 42), None) + + def test_search(self): + """ Test the implemented search function """ + + # Call search with no other parameters than the database. + # All projects should be returned + self.assertEqual(len(data.search(self.loaded_data)), 4) + + # Search for projects with csv as technique. + # 1 project should be returned + self.assertEqual(len(data.search(self.loaded_data, techniques=['csv'])), 1) + + # Search for projects including Python and sort them in ascending order. + # Ensure that returned projects are sorted by ascending dates + res = data.search(self.loaded_data, sort_order='asc',techniques=["python"]) + self.assertEqual(res[0]['start_date'], '2009-09-05') + self.assertEqual(res[1]['start_date'], '2009-09-07') + self.assertEqual(res[2]['start_date'], '2009-09-08') + + # Search for the term 'okänt' in three specified search fields. Sort + # results by end_date. + # Ensure that projects are returned in the correct order. + res = data.search(self.loaded_data, + sort_by="end_date", + search='okänt', + search_fields=['project_id','project_name','course_name']) + self.assertEqual(len(res), 3) + self.assertEqual(res[0]['project_id'], 2) + self.assertEqual(res[1]['project_id'], 3) + self.assertEqual(res[2]['project_id'], 1) + + # Search for 'okänt' in specified search fields. + # Ensure correct number of results + res = data.search(self.loaded_data, + search="okänt", + search_fields=["project_id","project_name","course_name"]) + self.assertEqual(len(res), 3) + + # Search for 'okänt' in specified search fields, provide empty technique list + # Ensure correct number of results + res = data.search(self.loaded_data, + techniques=[], + search="okänt", + search_fields=["project_id","project_name","course_name"]) + self.assertEqual(len(res), 3) + + # Search for 'okänt', provide empty search fields list + # Ensure 0 results + res = data.search(self.loaded_data, search="okänt", search_fields=[]) + self.assertEqual(len(res), 0) + + # Search with results sorted by group size. 
+ # Ensure results are in descending order + res = data.search(self.loaded_data, sort_by='group_size') + self.assertEqual(res[0]['project_id'], 4) #1 + self.assertEqual(res[1]['project_id'], 2) #2 + self.assertEqual(res[2]['project_id'], 3) #3 + self.assertEqual(res[3]['project_id'], 1) #4 + + def test_get_techniques(self): + """ Test the implemented get_techniques function """ + + res = data.get_techniques(self.loaded_data) + self.assertEqual(res, self.expected_technique_data) + + def test_get_technique_stats(self): + """ Test the implemented get_technique_stats function """ + + res = data.get_technique_stats(self.loaded_data) + res = sort_dict(res,'id') + + self.expected_technique_stat_data = sort_dict(self.expected_technique_stat_data,'id') + + self.assertEqual(res, self.expected_technique_stat_data) + + +if __name__ == '__main__': + print ("Test: ", md5(sys.argv[0].encode('UTF-8')).hexdigest()) + print ("Test data:", md5(b"data.json").hexdigest()) + print() + unittest.main() -- GitLab
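The new data layer is exercised by data_test.py, which is run exactly as its docstring
describes (python3 data_test.py, with data.py and data.json in the same directory).
For the module itself, a minimal usage sketch, assuming the data.json from this commit
is in the working directory (the expected values mirror the test assertions):

    import pprint
    import data

    db = data.load('data.json')                     # None if the file is missing or invalid
    print(data.get_project(db, 1)['project_name'])  # 'python data-module test script'
    for project in data.search(db, sort_order='asc', techniques=['python']):
        print(project['start_date'])                # 2009-09-05, then 2009-09-07, then 2009-09-08
    pprint.pp(data.get_technique_stats(db))         # technique -> [{'id': ..., 'name': ...}, ...]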