diff --git a/.DS_Store b/.DS_Store
new file mode 100644
index 00000000..f938976c
Binary files /dev/null and b/.DS_Store differ
diff --git a/.idea/.gitignore b/.idea/.gitignore
new file mode 100644
index 00000000..e69de29b
diff --git a/.idea/DS-Unit-3-Sprint-2-SQL-and-Databases.iml b/.idea/DS-Unit-3-Sprint-2-SQL-and-Databases.iml
new file mode 100644
index 00000000..d0876a78
--- /dev/null
+++ b/.idea/DS-Unit-3-Sprint-2-SQL-and-Databases.iml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/dataSources.local.xml b/.idea/dataSources.local.xml
new file mode 100644
index 00000000..fe6d5dba
--- /dev/null
+++ b/.idea/dataSources.local.xml
@@ -0,0 +1,10 @@
+
+
+
+
+
+ no-auth
+
+
+
+
\ No newline at end of file
diff --git a/.idea/dataSources.xml b/.idea/dataSources.xml
new file mode 100644
index 00000000..2c5e5cfb
--- /dev/null
+++ b/.idea/dataSources.xml
@@ -0,0 +1,16 @@
+
+
+
+
+ sqlite.xerial
+ true
+ org.sqlite.JDBC
+ jdbc:sqlite:$PROJECT_DIR$/module1-introduction-to-sql/rpg_db.sqlite3
+
+
+ file://$APPLICATION_CONFIG_DIR$/jdbc-drivers/Xerial SQLiteJDBC/3.31.1/sqlite-jdbc-3.31.1.jar
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/Project_Default.xml b/.idea/inspectionProfiles/Project_Default.xml
new file mode 100644
index 00000000..f090f250
--- /dev/null
+++ b/.idea/inspectionProfiles/Project_Default.xml
@@ -0,0 +1,16 @@
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml
new file mode 100644
index 00000000..105ce2da
--- /dev/null
+++ b/.idea/inspectionProfiles/profiles_settings.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
new file mode 100644
index 00000000..d1e22ecb
--- /dev/null
+++ b/.idea/misc.xml
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
new file mode 100644
index 00000000..c2a4bb2f
--- /dev/null
+++ b/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
new file mode 100644
index 00000000..94a25f7f
--- /dev/null
+++ b/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/workspace.xml b/.idea/workspace.xml
new file mode 100644
index 00000000..637e7372
--- /dev/null
+++ b/.idea/workspace.xml
@@ -0,0 +1,81 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ 1597906696621
+
+
+ 1597906696621
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Study_Guide_Resources/.idea/.gitignore b/Study_Guide_Resources/.idea/.gitignore
new file mode 100644
index 00000000..73f69e09
--- /dev/null
+++ b/Study_Guide_Resources/.idea/.gitignore
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
+# Editor-based HTTP Client requests
+/httpRequests/
diff --git a/Study_Guide_Resources/.idea/Study_Guide__&_Resources.iml b/Study_Guide_Resources/.idea/Study_Guide__&_Resources.iml
new file mode 100644
index 00000000..e7bd087d
--- /dev/null
+++ b/Study_Guide_Resources/.idea/Study_Guide__&_Resources.iml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Study_Guide_Resources/.idea/dataSources.xml b/Study_Guide_Resources/.idea/dataSources.xml
new file mode 100644
index 00000000..8ae6ce9b
--- /dev/null
+++ b/Study_Guide_Resources/.idea/dataSources.xml
@@ -0,0 +1,27 @@
+
+
+
+
+ sqlite.xerial
+ true
+ org.sqlite.JDBC
+ jdbc:sqlite:$PROJECT_DIR$/example_db.sqlite
+
+
+ file://$APPLICATION_CONFIG_DIR$/jdbc-drivers/Xerial SQLiteJDBC/3.31.1/sqlite-jdbc-3.31.1.jar
+
+
+
+
+ sqlite.xerial
+ true
+ org.sqlite.JDBC
+ jdbc:sqlite:$PROJECT_DIR$/rpg_db.sqlite3
+
+
+ file://$APPLICATION_CONFIG_DIR$/jdbc-drivers/Xerial SQLiteJDBC/3.31.1/sqlite-jdbc-3.31.1.jar
+
+
+
+
+
\ No newline at end of file
diff --git a/Study_Guide_Resources/.idea/inspectionProfiles/Project_Default.xml b/Study_Guide_Resources/.idea/inspectionProfiles/Project_Default.xml
new file mode 100644
index 00000000..5cb71ef0
--- /dev/null
+++ b/Study_Guide_Resources/.idea/inspectionProfiles/Project_Default.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Study_Guide_Resources/.idea/inspectionProfiles/profiles_settings.xml b/Study_Guide_Resources/.idea/inspectionProfiles/profiles_settings.xml
new file mode 100644
index 00000000..105ce2da
--- /dev/null
+++ b/Study_Guide_Resources/.idea/inspectionProfiles/profiles_settings.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Study_Guide_Resources/.idea/misc.xml b/Study_Guide_Resources/.idea/misc.xml
new file mode 100644
index 00000000..914edf90
--- /dev/null
+++ b/Study_Guide_Resources/.idea/misc.xml
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/Study_Guide_Resources/.idea/modules.xml b/Study_Guide_Resources/.idea/modules.xml
new file mode 100644
index 00000000..5d8f9f12
--- /dev/null
+++ b/Study_Guide_Resources/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Study_Guide_Resources/.idea/sqldialects.xml b/Study_Guide_Resources/.idea/sqldialects.xml
new file mode 100644
index 00000000..0cf6113b
--- /dev/null
+++ b/Study_Guide_Resources/.idea/sqldialects.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Study_Guide_Resources/.idea/vcs.xml b/Study_Guide_Resources/.idea/vcs.xml
new file mode 100644
index 00000000..6c0b8635
--- /dev/null
+++ b/Study_Guide_Resources/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/Study_Guide_Resources/Intro-to-SQL-notes.pdf b/Study_Guide_Resources/Intro-to-SQL-notes.pdf
new file mode 100644
index 00000000..619171ff
Binary files /dev/null and b/Study_Guide_Resources/Intro-to-SQL-notes.pdf differ
diff --git a/Study_Guide_Resources/Make_Insert_Data.py b/Study_Guide_Resources/Make_Insert_Data.py
new file mode 100644
index 00000000..1f98a2d5
--- /dev/null
+++ b/Study_Guide_Resources/Make_Insert_Data.py
@@ -0,0 +1,52 @@
+# Creating & inserting data with sqlite3
+import sqlite3
+
+conn = sqlite3.connect('example_db.sqlite')
+
+
+def create_statement(conn):
+ curs = conn.cursor()
+ create_statement = """
+ CREATE TABLE if not exists students (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ name CHAR(30),
+ favorite_number INTEGER,
+ least_favorite_number INTEGER
+ );
+ """
+ curs.execute(create_statement)
+ curs.close()
+ conn.commit()
+
+
+def insert_data(conn):
+ # Inserting works like reading: execute a statement with a cursor,
+ # then commit (instead of fetching)
+ my_data = [
+ ('Jon', 7, 12),
+ ('Alejandro', 77, 43),
+ ('Rivera', 100, 137)
+ ]
+ for row in my_data:
+ curs = conn.cursor()
+ insert_row = """
+ INSERT INTO students
+ (name ,favorite_number, least_favorite_number)
+ VALUES""" + str(row) + ";"
+ curs.execute(insert_row)
+ conn.commit()
+
+
+# Creates student table
+create_statement(conn)
+
+# Insert data from my data into students
+insert_data(conn)
+# Verify the inserts by selecting the rows back out
+curs = conn.cursor()
+curs.execute('SELECT * FROM students LIMIT 10;')
+print(curs.fetchall())
diff --git a/Study_Guide_Resources/Pipfile b/Study_Guide_Resources/Pipfile
new file mode 100644
index 00000000..b723d019
--- /dev/null
+++ b/Study_Guide_Resources/Pipfile
@@ -0,0 +1,11 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+
+[packages]
+
+[requires]
+python_version = "3.7"
diff --git a/Study_Guide_Resources/Pipfile.lock b/Study_Guide_Resources/Pipfile.lock
new file mode 100644
index 00000000..9a51a282
--- /dev/null
+++ b/Study_Guide_Resources/Pipfile.lock
@@ -0,0 +1,20 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "7e7ef69da7248742e869378f8421880cf8f0017f96d94d086813baa518a65489"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {}
+}
diff --git a/Study_Guide_Resources/Queries_Rpg.py b/Study_Guide_Resources/Queries_Rpg.py
new file mode 100644
index 00000000..dcb48904
--- /dev/null
+++ b/Study_Guide_Resources/Queries_Rpg.py
@@ -0,0 +1,98 @@
+# This directory contains a file rpg_db.sqlite3, a database for a hypothetical webapp role-playing game.
+# This test data has dozens-to-hundreds of randomly generated characters across the base classes
+# (Fighter, Mage, Cleric, and Thief) as well as a few Necromancers. Also generated are Items, Weapons, and
+# connections from characters to them. Note that, while the name field was randomized, the numeric and boolean fields were left as defaults.
+# Use sqlite3 to load and write queries to explore the data, and answer the following questions:
+# How many total Characters are there?
+
+import sqlite3
+
+
+def connect_to_db(db_name='rpg_db.sqlite3'):
+ return sqlite3.connect(db_name)
+
+
+def execute_query(cursor, query):
+ cursor.execute(query)
+ return cursor.fetchall()
+
+
+"302 Characters"
+"""
+SELECT COUNT(*) AS number_of_characters
+FROM charactercreator_character;
+"""
+
+# How many of each specific subclass?
+"108 Mage"
+"""
+SELECT COUNT(*) AS Mage_Counts
+FROM charactercreator_character AS CC
+JOIN charactercreator_mage AS CCM ON CCM.character_ptr_id = CC.character_id;
+"""
+
+# How many total Items?
+"174 items"
+"""
+SELECT COUNT(DISTINCT(item_id))
+FROM armory_item;
+"""
+
+# How many of the Items are weapons? How many are not?
+"37 Weapons"
+"""
+SELECT COUNT(*)
+FROM armory_item LEFT JOIN armory_weapon
+ON armory_weapon.item_ptr_id = armory_item.item_id
+WHERE armory_weapon.item_ptr_id IS NOT NULL;
+"""
+
+"137 Non Weapons"
+"""
+SELECT COUNT(*)
+FROM armory_item LEFT JOIN armory_weapon
+ON armory_weapon.item_ptr_id = armory_item.item_id
+WHERE armory_weapon.item_ptr_id is NULL;
+"""
+
+# How many Items does each character have? (Return first 20 rows)
+" Answer in query, prints first 20 rows and items per character"
+"""
+SELECT CC.name, COUNT(CCI.item_id) AS Items_Per_Character
+FROM charactercreator_character AS CC JOIN charactercreator_character_inventory AS CCI
+ON CCI.character_id= CC.character_id
+JOIN armory_item AS AI ON AI.item_id = CCI.item_id
+GROUP BY CC.character_id
+LIMIT 20;
+"""
+# How many Weapons does each character have? (Return first 20 rows)
+"Answer in query, prints first 20 rows and items per character"
+"""
+SELECT CC.name, COUNT(CCI.item_id) AS Weapons_Per_Character
+FROM charactercreator_character AS CC JOIN charactercreator_character_inventory AS CCI
+ON CCI.character_id= CC.character_id
+JOIN armory_weapon AS AW ON AW.item_ptr_id= CCI.item_id
+GROUP BY CC.character_id
+LIMIT 20;
+"""
+
+# On average, how many Items does each Character have?
+"~2.974 Items per character "
+"""
+SELECT AVG(Items_Per_Character) FROM (SELECT COUNT(CCI.item_id) AS Items_Per_Character
+FROM charactercreator_character AS CC JOIN charactercreator_character_inventory AS CCI
+ON CCI.character_id= CC.character_id
+JOIN armory_item AS AI ON AI.item_id = CCI.item_id
+GROUP BY CC.character_id);
+"""
+# On average, how many Weapons does each character have?
+
+"~1.31 Weapons per character"
+"""
+SELECT AVG(Weapons_Per_Character) FROM (SELECT CC.name, COUNT(CCI.item_id) AS Weapons_Per_Character
+FROM charactercreator_character AS CC JOIN charactercreator_character_inventory AS CCI
+ON CCI.character_id= CC.character_id
+JOIN armory_weapon AS AW ON AW.item_ptr_id= CCI.item_id
+GROUP BY CC.character_id)
+;
+"""
diff --git a/Study_Guide_Resources/Queries_Titanic.py b/Study_Guide_Resources/Queries_Titanic.py
new file mode 100644
index 00000000..21362f54
--- /dev/null
+++ b/Study_Guide_Resources/Queries_Titanic.py
@@ -0,0 +1,119 @@
+# How many passengers survived, and how many died?
+# How many passengers were in each class?
+# How many passengers survived/died within each class?
+# What was the average age of survivors vs nonsurvivors
+# What was the average age of each passenger class?
+# What was the average fare by passenger class? By survival?
+# How many siblings/spouses aboard on average, by passenger class?By survival?
+# How many parents/children aboard on average, by passenger class? By survival?
+# Do any passengers have the same name? (Bonus! Hard, may require pulling and processing with Python) How
+# many married couples were aboard the Titanic? Assume that two people (one Mr. and one Mrs.) with the same last name
+# and with at least 1 sibling/spouse aboard are a married couple.
+
+import sqlite3
+
+
+def connect_to_db(db_name='Titanic.db'):
+ return sqlite3.connect(db_name)
+
+
+def execute_query(cursor, query):
+ cursor.execute(query)
+ return cursor.fetchall()
+
+
+# Queries
+GET_TOTAL_SURVIVORS = """
+ SELECT SUM(Survivor) AS TOTAL_SURVIVORS
+ FROM (SELECT Name, COUNT(*) AS Survivor
+ FROM TITANIC
+ WHERE Survived = 1
+ GROUP BY Name);
+ """
+
+GET_TOTAL_DEATHS = """
+SELECT SUM(DIED) AS DEATHS
+FROM (SELECT Name, COUNT(*) AS DIED
+FROM TITANIC
+WHERE Survived = 0
+GROUP BY Name);
+"""
+
+GET_SURVIVED_DIED_PER_CLASS = """
+SELECT Pclass, Survived, COUNT(*) as SURVIVED_OR_DIED_PER_CLASS
+FROM TITANIC
+GROUP BY Pclass, Survived;
+"""
+GET_AVG_AGE_SURVIVOR = """
+SELECT AVG(Survivor_Age) FROM(SELECT Name, COUNT(*) AS Survivor, Age AS Survivor_Age
+FROM TITANIC WHERE Survived = 1 GROUP BY Name);
+"""
+
+GET_AVG_AGE_DEATH = """
+SELECT AVG(Death_Age) FROM( SELECT Name, COUNT(*) AS DEATH, Age as Death_Age
+FROM TITANIC WHERE Survived = 0 GROUP BY Name);
+"""
+GET_AVG_AGE_CLASS = """
+SELECT Pclass, Avg(Age) AS Avg_Age_Class
+FROM TITANIC
+GROUP BY Pclass;
+"""
+
+GET_AVG_FARE_CLASS = """
+SELECT Pclass, AVG(FARE) AS FARES_BY_CLASS
+FROM TITANIC
+GROUP BY Pclass
+"""
+
+GET_AVG_FARE_SURVIVAL = """
+SELECT Survived, AVG(FARE) AS FARES_BY_CLASS
+FROM TITANIC
+GROUP BY Survived;
+"""
+GET_AVG_SIBLINGS_SPOUSES_CLASS = """
+SELECT Pclass, AVG(Siblings_Spouses)
+FROM TITANIC
+GROUP BY Pclass
+"""
+GET_AVG_SIBLINGS_SPOUSES_SURVIVAL = """
+SELECT Survived, AVG(Siblings_Spouses)
+FROM TITANIC
+GROUP BY Survived;
+"""
+
+if __name__ == '__main__':
+ conn = connect_to_db()
+ curs = conn.cursor()
+ total_survivors = execute_query(curs, GET_TOTAL_SURVIVORS)
+ print('Total survivors', total_survivors)
+
+ total_deaths = execute_query(curs, GET_TOTAL_DEATHS)
+ print('Total deaths', total_deaths)
+
+ survived_died_per_class = execute_query(curs, GET_SURVIVED_DIED_PER_CLASS)
+ print('Get survived or Died Per Class', survived_died_per_class)
+
+ avg_age_survivor = execute_query(curs, GET_AVG_AGE_SURVIVOR)
+ print('Average Age SURVIVOR', avg_age_survivor)
+
+ avg_age_death = execute_query(curs, GET_AVG_AGE_DEATH)
+ print('Average Age of Deaths', avg_age_death)
+
+ avg_age_class = execute_query(curs, GET_AVG_AGE_CLASS)
+ print('Average Age of Passenger Class', avg_age_class)
+
+ avg_fare_class = execute_query(curs, GET_AVG_FARE_CLASS)
+ print('Average Fare for Each Class', avg_fare_class)
+
+ avg_fare_survival = execute_query(curs, GET_AVG_FARE_SURVIVAL)
+ print('Average Fare by Survival', avg_fare_survival)
+
+ avg_siblings_spouses_class = execute_query(curs, GET_AVG_SIBLINGS_SPOUSES_CLASS)
+ print('Average Siblings/Spouses by Class', avg_siblings_spouses_class)
+
+ avg_siblings_spouses_survival = execute_query(curs, GET_AVG_SIBLINGS_SPOUSES_SURVIVAL)
+ print('Average Siblings/Spouses by Survival', avg_siblings_spouses_survival)
+
+
+
+
diff --git a/Study_Guide_Resources/SQL-cheat-sheet.pdf b/Study_Guide_Resources/SQL-cheat-sheet.pdf
new file mode 100644
index 00000000..c55ac2a0
Binary files /dev/null and b/Study_Guide_Resources/SQL-cheat-sheet.pdf differ
diff --git a/Study_Guide_Resources/Titanic.db b/Study_Guide_Resources/Titanic.db
new file mode 100644
index 00000000..2a1bbafa
Binary files /dev/null and b/Study_Guide_Resources/Titanic.db differ
diff --git a/Study_Guide_Resources/Unit 3 Sprint 2 SQL and Databases Study Guide.md b/Study_Guide_Resources/Unit 3 Sprint 2 SQL and Databases Study Guide.md
new file mode 100644
index 00000000..b913c529
--- /dev/null
+++ b/Study_Guide_Resources/Unit 3 Sprint 2 SQL and Databases Study Guide.md
@@ -0,0 +1,191 @@
+# Unit 3 Sprint 2 SQL and Databases Study Guide
+
+This study guide should reinforce and provide practice for all of the concepts you have seen in the past week. There are a mix of written questions and coding exercises, both are equally important to prepare you for the sprint challenge as well as to be able to speak on these topics comfortably in interviews and on the job.
+
+If you get stuck or are unsure of something remember the 20 minute rule. If that doesn't help, then research a solution with [google](https://www.google.com) or [StackOverflow](https://www.stackoverflow.com). Only once you have exhausted these methods should you turn to your Team Lead - they won't be there on your SC or during an interview. That being said, don't hesitate to ask for help if you truly are stuck.
+
+Have fun studying!
+
+## SQL
+
+**Concepts:**
+
+1. What is SQL?
+Structured Query Language - the standard language for defining, querying, and manipulating data in relational databases.
+2. What is a RDBMS?
+- Relational Database Management System
+The data in an RDBMS is stored in database objects called tables.
+A table is a collection of related data entries organized into columns and rows.
+
+3. What is an ETL pipeline?
+- Means Extract, Transform, Load
+- Extract: pull data out of one or more source systems (files, APIs, other databases)
+- Transform: clean and reshape that data into a single consistent format
+- Load: write the transformed data into the target database, where any specific piece of data can now be queried and compared against any other (a minimal sketch follows)
+
+4. What is a schema?
+- A schema is the blueprint of a database: it defines the tables, their columns and data types, and the keys/relationships between them.
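+
+In code, the three steps map onto three short blocks. Here is a minimal sketch using only `sqlite3` (the `source.sqlite3`/`target.sqlite3` file names and the `students` table are made-up stand-ins; module 2's `ETL.py` does the real version from SQLite into PostgreSQL):
+
+```
+import sqlite3
+
+# Extract - pull rows out of the source database
+src = sqlite3.connect('source.sqlite3')
+rows = src.execute('SELECT name, favorite_number FROM students;').fetchall()
+src.close()
+
+# Transform - clean/reshape the rows into the format the target expects
+cleaned = [(name.strip().title(), number) for name, number in rows]
+
+# Load - write the consistent rows into the target database
+dst = sqlite3.connect('target.sqlite3')
+dst.execute('CREATE TABLE IF NOT EXISTS students (name VARCHAR(30), favorite_number INTEGER);')
+dst.executemany('INSERT INTO students (name, favorite_number) VALUES (?, ?);', cleaned)
+dst.commit()
+dst.close()
+```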
+
+5. What does each letter in ACID stand for? Give an explanation for each and why they matter?
+In the context of transaction processing, the acronym ACID refers to the four key properties of a transaction: atomicity, consistency, isolation, and durability.
+
+- Atomicity
+All changes to data are performed as if they are a single operation. That is, all the changes are performed, or none of them are.
+For example, in an application that transfers funds from one account to another, the atomicity property ensures that, if a debit is made successfully from one account, the corresponding credit is made to the other account.
+- Consistency
+Data is in a consistent state when a transaction starts and when it ends.
+For example, in an application that transfers funds from one account to another, the consistency property ensures that the total value of funds in both the accounts is the same at the start and end of each transaction.
+- Isolation
+The intermediate state of a transaction is invisible to other transactions. As a result, transactions that run concurrently appear to be serialized.
+For example, in an application that transfers funds from one account to another, the isolation property ensures that another transaction sees the transferred funds in one account or the other, but not in both, nor in neither.
+- Durability
+After a transaction successfully completes, changes to data persist and are not undone, even in the event of a system failure.
+For example, in an application that transfers funds from one account to another, the durability property ensures that the changes made to each account will not be reversed.
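+
+In code, atomicity and durability are what `commit`/`rollback` give you: wrap both updates in one transaction so either both apply or neither does. A minimal `sqlite3` sketch of the funds-transfer example (the `bank.sqlite3` file and `accounts` table are hypothetical):
+
+```
+import sqlite3
+
+conn = sqlite3.connect('bank.sqlite3')  # hypothetical database with an accounts table
+curs = conn.cursor()
+try:
+    # Atomicity: both statements belong to one transaction
+    curs.execute('UPDATE accounts SET balance = balance - 100 WHERE id = 1;')
+    curs.execute('UPDATE accounts SET balance = balance + 100 WHERE id = 2;')
+    conn.commit()    # Durability: once commit returns, the transfer persists
+except sqlite3.Error:
+    conn.rollback()  # Atomicity: undo the partial debit if anything failed
+finally:
+    conn.close()
+```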
+
+6. Explain each of the table relationships and give an example for each
+(Reference: https://medium.com/@emekadc/how-to-implement-one-to-one-one-to-many-and-many-to-many-relationships-when-designing-a-database-9da2de684710)
+ - One-to-One
+When a row in one table is related to only one row in another table, and vice versa, the relationship is one-to-one. It can be created with a primary key and a unique foreign key constraint.
+For instance, a Country has only one UN Representative, and a UN Representative represents only one Country.
+ - One-to-Many
+When a row in one table can have multiple matching rows in another table, the relationship is one-to-many. It can be created with an ordinary primary key-foreign key relationship.
+For instance, one Car can have multiple Engineers assigned to it.
+ - Many-to-Many
+When a row in one table can have multiple matching rows in another table, and a row in that table can also have multiple matching rows in the first, the relationship is many-to-many.
+It is created with a third table, called a junction (or bridging) table, that connects the two tables and stores the attributes of each relationship between them. A sketch follows this list.
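+
+The rpg database from module 1 is a working example of the many-to-many case: `charactercreator_character_inventory` is the junction table connecting characters to items. A minimal sketch of that shape with `sqlite3` (the simplified table and file names here are made up):
+
+```
+import sqlite3
+
+conn = sqlite3.connect('relationships_demo.sqlite3')  # hypothetical file
+conn.executescript("""
+CREATE TABLE IF NOT EXISTS character (
+    character_id INTEGER PRIMARY KEY,
+    name VARCHAR(30)
+);
+CREATE TABLE IF NOT EXISTS item (
+    item_id INTEGER PRIMARY KEY,
+    name VARCHAR(30)
+);
+-- junction/bridging table: one row per (character, item) pairing
+CREATE TABLE IF NOT EXISTS character_inventory (
+    character_id INTEGER REFERENCES character(character_id),
+    item_id INTEGER REFERENCES item(item_id)
+);
+""")
+conn.commit()
+conn.close()
+```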
+
+## Syntax
+For the following section, give a brief explanation of each of the SQL commands.
+
+1. **SELECT** -
+2. **WHERE** -
+3. **LIMIT** -
+4. **ORDER** -
+5. **JOIN** -
+6. **CREATE TABLE** -
+7. **INSERT** -
+8. **DISTINCT** -
+9. **GROUP BY** -
+10. **ORDER BY** -
+11. **AVG** -
+12. **MAX** -
+13. **AS** -
+
+## Starting From Scratch
+Create a file named `study_part1.py` and complete the exercise below. The only library you should need to import is `sqlite3`. Don't forget to be PEP8 compliant!
+1. Create a new database file called `study_part1.sqlite3`
+2. Create a table with the following columns
+ ```
+ student - string
+ studied - string
+ grade - int
+ age - int
+ sex - string
+ ```
+
+3. Fill the table with the following data
+
+ ```
+ 'Lion-O', 'True', 85, 24, 'Male'
+ 'Cheetara', 'True', 95, 22, 'Female'
+ 'Mumm-Ra', 'False', 65, 153, 'Male'
+ 'Snarf', 'False', 70, 15, 'Male'
+ 'Panthro', 'True', 80, 30, 'Male'
+ ```
+
+4. Save your data. You can check that everything is working so far if you can view the table and data in DBBrowser
+
+5. Write the following queries to check your work. Query outputs should be formatted for readability; don't simply print a bare number to the screen with no explanation, add context. (A minimal pattern sketch follows this list.)
+
+ ```
+ What is the average age? Expected Result - 48.8
+ What are the names of the female students? Expected Result - 'Cheetara'
+ How many students studied? Expected Result - 3
+ Return all students and all columns, sorted by student names in alphabetical order.
+ ```
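+
+The general pattern for all five steps, as a rough sketch (the table name `students` and the single sample row are placeholders; fill in the remaining rows and queries yourself):
+
+```
+import sqlite3
+
+conn = sqlite3.connect('study_part1.sqlite3')
+curs = conn.cursor()
+curs.execute("""CREATE TABLE IF NOT EXISTS students
+                (student VARCHAR(30), studied VARCHAR(10), grade INT, age INT, sex VARCHAR(10));""")
+rows = [('Lion-O', 'True', 85, 24, 'Male')]  # ...add the remaining four rows
+curs.executemany('INSERT INTO students VALUES (?, ?, ?, ?, ?);', rows)
+conn.commit()
+
+curs.execute('SELECT AVG(age) FROM students;')
+print('Average age:', curs.fetchone()[0])
+conn.close()
+```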
+
+## Query All the Tables!
+
+### Setup
+Before we get started you'll need a few things.
+1. Download the [Chinook Database here](https://github.com/bundickm/Study-Guides/blob/master/data/Chinook_Sqlite.sqlite)
+2. The schema can be [found here](https://github.com/bundickm/Study-Guides/blob/master/data/Chinook%20Schema.png)
+3. Create a file named `study_part2.py` and complete the exercise below. The only library you should need to import is `sqlite3`. Don't forget to be PEP8 compliant!
+4. Add a connection to the chinook database so that you can answer the queries below.
+
+### Queries
+**Single Table Queries**
+1. Find the average invoice total for each customer; return the details for the first 5 IDs
+2. Return all columns in Customer for the first 5 customers residing in the United States
+3. Which employee does not report to anyone?
+4. Find the number of unique composers
+5. How many rows are in the Track table?
+
+**Joins**
+
+6. Get the name of all Black Sabbath tracks and the albums they came off of
+7. What is the most popular genre by number of tracks?
+8. Find all customers that have spent over $45
+9. Find the first and last name, title, and the number of customers each employee has helped. If the customer count is 0 for an employee, it doesn't need to be displayed. Order the employees from most to least customers.
+10. Return the first and last name of each employee and who they report to
+
+## NoSQL
+https://www.unitedglobalgrp.com/uncategorized/to-sql-or-to-nosql/#:~:text=The%20most%20significant%20trade%2Doff,systems%20%E2%80%93%20i.e.%20relational%20databases%20vs.&text=NoSQL%20databases%20do%20not%20require,spent%20preparing%20data%20%5B7%5D.
+Unless you have a reason to scale, go with traditional SQL.
+If you have relational data, go with PostgreSQL.
+
+
+### Questions of Understanding
+
+1. What is a document store?
+https://www.mongodb.com/document-databases
+
+What is a Document Database?
+Built around JSON-like documents, document databases are both natural and flexible for developers to work with. They promise higher developer productivity and faster evolution with application needs. As a class of non-relational (sometimes called NoSQL) databases, the document data model has become the most popular alternative to tabular, relational databases.
+
+1. Intuitive Data Model: Faster and Easier for Developers
+Documents map to the objects in your code, so they are much more natural to work with. There is no need to decompose data across tables, run expensive JOINs, or integrate a separate ORM layer. Data that is accessed together is stored together, so you have less code to write and your users get higher performance.
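+
+Module 3's `Mongo.py` works exactly this way: a document is just a Python dict, inserted and queried with no schema migration first. A minimal sketch (the connection-string placeholders need your own Atlas credentials):
+
+```
+import pymongo
+
+# placeholders - fill in your own user, password and cluster host
+client = pymongo.MongoClient('mongodb+srv://<user>:<password>@<cluster>/test?retryWrites=true&w=majority')
+db = client.test
+
+# a document is a JSON-like dict; no table schema to define first
+db.test.insert_one({'animal': 'manatee', 'color': 'green', 'number': 7})
+
+# query by any field
+print(list(db.test.find({'color': 'green'})))
+```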
+
+2. What is a `key:value` pair? What data type in Python uses `key:value` pairs?
+
+3. Give an example of when it would be best to use a SQL Database and when it would be best to use a NoSQL Database
+
+4. What are some of the trade-offs between SQL and NoSQL?
+The most significant trade-off between SQL and NoSQL systems – i.e. relational databases vs. "everything else" – is trading the security and trustworthiness of vital, operational data for the agility, scalability and flexibility of big data.
+Relational databases are specialized to structure data in a specific, well-defined, well-organized model [9]. Because they fully support ACID principles, transactions are not only highly reliable, the database also guarantees crash recovery. The security risks that do exist are defined, and new research and product iterations continually improve against these gaps [9]. However, the same fail-safes that guarantee data also restrain performance [6]. The relational database stores multiple copies of data, which is centralized and unencrypted; in this way, the RDBMS is both inefficient and vulnerable to fraud, error and security attacks [3].
+
+NoSQL databases do not require pre-defined schemas, relationships or keys; less complex models translate to less time spent preparing data [7]. Additionally, the fact that NoSQL systems don't fully support ACID principles also translates into faster storage and retrieval of data [7]. These performance-focused design features lend themselves particularly to the manipulation of Big Data. At the same time, however, their design leaves security as an afterthought [9]. Because NoSQL systems cannot be evaluated using ACID, transaction reliability is not natively assured [5, 6, 9]. Where ACID principles are programmed into NoSQL systems, designers face a performance vs. consistency trade-off – i.e. performance is negatively affected [6]. Further, some research indicates a wide variation in the distribution of NoSQL database performance measures based on the type of operation performed, as well as the number of synchronous users [8]. Without ACID support, changes made close together in time can overwrite one another within the database.
+
+5. What does each letter in BASE stand for? Give an explanation for each and why they matter?
+ - B
+ - A
+ - S
+ - E
diff --git a/Study_Guide_Resources/example_db.sqlite b/Study_Guide_Resources/example_db.sqlite
new file mode 100644
index 00000000..483be88a
Binary files /dev/null and b/Study_Guide_Resources/example_db.sqlite differ
diff --git a/Study_Guide_Resources/rpg_db.sqlite3 b/Study_Guide_Resources/rpg_db.sqlite3
new file mode 100644
index 00000000..837d7f16
Binary files /dev/null and b/Study_Guide_Resources/rpg_db.sqlite3 differ
diff --git a/Study_Guide_Resources/schema.png b/Study_Guide_Resources/schema.png
new file mode 100644
index 00000000..b3a99ee2
Binary files /dev/null and b/Study_Guide_Resources/schema.png differ
diff --git a/module1-introduction-to-sql/.DS_Store b/module1-introduction-to-sql/.DS_Store
new file mode 100644
index 00000000..4e48db2c
Binary files /dev/null and b/module1-introduction-to-sql/.DS_Store differ
diff --git a/module1-introduction-to-sql/.idea/.gitignore b/module1-introduction-to-sql/.idea/.gitignore
new file mode 100644
index 00000000..73f69e09
--- /dev/null
+++ b/module1-introduction-to-sql/.idea/.gitignore
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
+# Editor-based HTTP Client requests
+/httpRequests/
diff --git a/module1-introduction-to-sql/.idea/dataSources.xml b/module1-introduction-to-sql/.idea/dataSources.xml
new file mode 100644
index 00000000..ed7e9b5d
--- /dev/null
+++ b/module1-introduction-to-sql/.idea/dataSources.xml
@@ -0,0 +1,11 @@
+
+
+
+
+ sqlite.xerial
+ true
+ org.sqlite.JDBC
+ jdbc:sqlite:$PROJECT_DIR$/rpg_db.sqlite3
+
+
+
\ No newline at end of file
diff --git a/module1-introduction-to-sql/.idea/inspectionProfiles/profiles_settings.xml b/module1-introduction-to-sql/.idea/inspectionProfiles/profiles_settings.xml
new file mode 100644
index 00000000..105ce2da
--- /dev/null
+++ b/module1-introduction-to-sql/.idea/inspectionProfiles/profiles_settings.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/module1-introduction-to-sql/.idea/misc.xml b/module1-introduction-to-sql/.idea/misc.xml
new file mode 100644
index 00000000..ee16292f
--- /dev/null
+++ b/module1-introduction-to-sql/.idea/misc.xml
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/module1-introduction-to-sql/.idea/module1-introduction-to-sql.iml b/module1-introduction-to-sql/.idea/module1-introduction-to-sql.iml
new file mode 100644
index 00000000..ddda5d89
--- /dev/null
+++ b/module1-introduction-to-sql/.idea/module1-introduction-to-sql.iml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/module1-introduction-to-sql/.idea/modules.xml b/module1-introduction-to-sql/.idea/modules.xml
new file mode 100644
index 00000000..d53bc76e
--- /dev/null
+++ b/module1-introduction-to-sql/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/module1-introduction-to-sql/.idea/sqldialects.xml b/module1-introduction-to-sql/.idea/sqldialects.xml
new file mode 100644
index 00000000..d4d2420f
--- /dev/null
+++ b/module1-introduction-to-sql/.idea/sqldialects.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/module1-introduction-to-sql/.idea/vcs.xml b/module1-introduction-to-sql/.idea/vcs.xml
new file mode 100644
index 00000000..6c0b8635
--- /dev/null
+++ b/module1-introduction-to-sql/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/module1-introduction-to-sql/DF_to_SQL.py b/module1-introduction-to-sql/DF_to_SQL.py
new file mode 100644
index 00000000..d9a06cae
--- /dev/null
+++ b/module1-introduction-to-sql/DF_to_SQL.py
@@ -0,0 +1,27 @@
+import pandas as pd
+import sqlite3
+
+df = pd.read_csv('https://archive.ics.uci.edu/ml/machine-learning-databases/00476/buddymove_holidayiq.csv')
+df = df.rename(columns={'User Id': 'UserId'})
+
+# Creating the database (the file is created on first connect)
+conn = sqlite3.connect('Reviews.db')
+c = conn.cursor()
+# Create the table (IF NOT EXISTS so the script can be re-run)
+c.execute('CREATE TABLE IF NOT EXISTS REVIEWS (UserId text, Sports number, Religious number, Nature number,'
+ ' Theatre number, Shopping number, Picnic number)')
+# Saving the created table into our database
+conn.commit()
+
+
+# Convert DF to SQL
+df.to_sql('REVIEWS', conn, if_exists='replace', index=False)
+
+c.execute("""
+SELECT * FROM REVIEWS
+
+
+""")
+
+for row in c.fetchall():
+ print(row)
diff --git a/module1-introduction-to-sql/Pipfile b/module1-introduction-to-sql/Pipfile
new file mode 100644
index 00000000..b723d019
--- /dev/null
+++ b/module1-introduction-to-sql/Pipfile
@@ -0,0 +1,11 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+
+[packages]
+
+[requires]
+python_version = "3.7"
diff --git a/module1-introduction-to-sql/Pipfile.lock b/module1-introduction-to-sql/Pipfile.lock
new file mode 100644
index 00000000..9a51a282
--- /dev/null
+++ b/module1-introduction-to-sql/Pipfile.lock
@@ -0,0 +1,20 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "7e7ef69da7248742e869378f8421880cf8f0017f96d94d086813baa518a65489"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {}
+}
diff --git a/module1-introduction-to-sql/README.md b/module1-introduction-to-sql/README.md
index 40497956..ac57e6bf 100644
--- a/module1-introduction-to-sql/README.md
+++ b/module1-introduction-to-sql/README.md
@@ -52,13 +52,34 @@ Use `sqlite3` to load and write queries to explore the data, and answer the
following questions:
- How many total Characters are there?
+ Answer: 302
+
- How many of each specific subclass?
+Cleric = 75
+Fighter = 68
+Mage = 108 (the 11 Necromancers are a subclass of Mage)
+Necromancer = 11
+Thief = 51
+
- How many total Items?
+ Answer: 174 Items
+
- How many of the Items are weapons? How many are not?
+ Answer: 37 weapons, 137 non-weapons
+
- How many Items does each character have? (Return first 20 rows)
+
+Answered as a Query
+
- How many Weapons does each character have? (Return first 20 rows)
+
+Answered as a Query
+
- On average, how many Items does each Character have?
+2.9735099337748343
+
- On average, how many Weapons does each character have?
+1.3096774193548386
You do not need all the tables - in particular, the `account_*`, `auth_*`,
`django_*`, and `socialaccount_*` tables are for the application and do not have
@@ -77,7 +98,7 @@ Some of these queries are challenging - that's OK! You can keep working on them
tomorrow as well (we'll visit loading the same data into PostgreSQL). It's also
OK to figure out the results partially with a query and partially with a bit of
logic or math afterwards, though doing things purely with SQL is a good goal.
-[Subqueries](https://www.w3resource.com/sql/subqueries/understanding-sql-subqueries.php)
+ [Subqueries](https://www.w3resource.com/sql/subqueries/understanding-sql-subqueries.php)
and [aggregation functions](https://www.sqltutorial.org/sql-aggregate-functions/)
may be helpful for putting together more complicated queries.
@@ -100,10 +121,13 @@ Using the standard `sqlite3` module:
Then write the following queries (also with `sqlite3`) to test:
- Count how many rows you have - it should be 249!
+- Verified
- How many users who reviewed at least 100 `Nature` in the category also
reviewed at least 100 in the `Shopping` category?
+- 78 Users
- (*Stretch*) What are the average number of reviews for each category?
+
Your code (to reproduce all above steps) should be saved in
`buddymove_holidayiq.py`, and added to the repository along with the generated
SQLite database.
diff --git a/module1-introduction-to-sql/Reviews.db b/module1-introduction-to-sql/Reviews.db
new file mode 100644
index 00000000..447d74a5
Binary files /dev/null and b/module1-introduction-to-sql/Reviews.db differ
diff --git a/module1-introduction-to-sql/buddymove_holiday_queries.py b/module1-introduction-to-sql/buddymove_holiday_queries.py
new file mode 100644
index 00000000..1280b35f
--- /dev/null
+++ b/module1-introduction-to-sql/buddymove_holiday_queries.py
@@ -0,0 +1,43 @@
+import sqlite3
+
+
+def connect_to_db(db_name='Reviews.db'):
+ return sqlite3.connect(db_name)
+
+
+def execute_query(cursor, query):
+ cursor.execute(query)
+ return cursor.fetchall()
+
+
+GET_ROWS = """
+ SELECT COUNT(*)
+ FROM REVIEWS;
+
+"""
+GET_REVIEWS_LEAST_100 = """
+SELECT COUNT(*)
+FROM REVIEWS
+WHERE Nature >= 100 and Shopping >=100
+;
+
+
+"""
+
+GET_AVG_FOR_EACH_REVIEW = """
+
+SELECT AVG(Sports), AVG(Religious),
+AVG(Nature), AVG(Theatre), AVG(Shopping), AVG(Picnic)
+FROM REVIEWS;
+
+"""
+
+if __name__ == '__main__':
+ conn = connect_to_db()
+ curs = conn.cursor()
+
+ total_rows = execute_query(curs, GET_ROWS)
+ print('Total rows', total_rows)
+
+ reviews_least_100 = execute_query(curs, GET_REVIEWS_LEAST_100)
+ print('Reviews with at least 100 Nature and 100 Shopping', reviews_least_100)
+
+ average_for_reviews = execute_query(curs, GET_AVG_FOR_EACH_REVIEW)
+ print('Averages for Each Review', average_for_reviews)
diff --git a/module1-introduction-to-sql/rpg_queries.py b/module1-introduction-to-sql/rpg_queries.py
new file mode 100644
index 00000000..d4e3df22
--- /dev/null
+++ b/module1-introduction-to-sql/rpg_queries.py
@@ -0,0 +1,143 @@
+import sqlite3
+
+
+def connect_to_db(db_name='rpg_db.sqlite3'):
+ return sqlite3.connect(db_name)
+
+
+def execute_query(cursor, query):
+ cursor.execute(query)
+ return cursor.fetchall()
+
+
+# QUERIES
+GET_TOTAL_CHARACTERS = """
+ SELECT COUNT(*)
+ FROM charactercreator_character;
+"""
+
+GET_CHARACTERS_CLERIC = """
+ SELECT COUNT(*)
+ FROM charactercreator_cleric;
+
+"""
+
+GET_CHARACTERS_FIGHTER = """
+ SELECT COUNT(*)
+ FROM charactercreator_fighter
+"""
+
+GET_CHARACTERS_MAGE = """
+ SELECT COUNT(*)
+ FROM charactercreator_mage
+"""
+GET_CHARACTERS_NECROMANCER = """
+ SELECT COUNT(*)
+ FROM charactercreator_necromancer
+"""
+GET_CHARACTERS_THIEF = """
+ SELECT COUNT(*)
+ FROM charactercreator_thief
+"""
+
+GET_TOTAL_ITEMS = """
+ SELECT COUNT(*)
+ FROM armory_item ;
+
+"""
+GET_WEAPONS = """
+
+ SELECT COUNT(*)
+ FROM armory_weapon AS aw, armory_item as ai
+ WHERE ai.item_id = aw.item_ptr_id;
+
+"""
+GET_ITEMS_PER_CHARACTER = """
+ SELECT cc.name, COUNT(*)
+ FROM charactercreator_character AS cc,
+ armory_item AS ai, charactercreator_character_inventory AS cci
+ WHERE cc.character_id = cci.character_id AND cci.item_id = ai.item_id
+ GROUP BY cc.character_id
+ ORDER BY 1 DESC
+ LIMIT 20;
+
+"""
+
+GET_WEAPONS_PER_CHARACTER = """
+ SELECT cc.name, COUNT(*)
+ FROM charactercreator_character AS cc,
+ armory_weapon AS aw, charactercreator_character_inventory AS cci
+ WHERE cc.character_id = cci.character_id AND cci.item_id = aw.item_ptr_id
+ GROUP BY cc.character_id
+ ORDER BY 1 DESC
+ LIMIT 20;
+"""
+
+GET_NON_WEAPONS = """
+ SELECT COUNT(*)
+ FROM armory_item
+ WHERE armory_item.item_id NOT IN
+ (SELECT armory_weapon.item_ptr_id
+ FROM armory_weapon)
+
+"""
+
+AVG_ITEMS_PER_CHARACTER = """
+ SELECT AVG(num_item)
+ FROM (SELECT character_id, character_name,
+COUNT(DISTINCT item_id) AS num_item FROM
+(SELECT cc.character_id, cc.name AS character_name, ai.item_id, ai.name AS item_name
+FROM charactercreator_character
+AS cc, armory_item AS ai, charactercreator_character_inventory
+AS cci WHERE cc.character_id = cci.character_id
+AND ai.item_id = cci.item_id) GROUP BY 1);
+"""
+AVG_WEAPONS_PER_CHARACTER = """
+ SELECT AVG(num_weapons)
+ FROM(SELECT character_id, character_name, COUNT(DISTINCT aw_name) AS num_weapons
+ FROM (SELECT cc.character_id, cc.name AS character_name, aw.item_ptr_id AS aw_name
+ FROM charactercreator_character
+ AS cc, armory_weapon AS aw, charactercreator_character_inventory
+ AS cci WHERE cc.character_id = cci.character_id
+ AND aw.item_ptr_id = cci.item_id)
+ GROUP BY 1)
+"""
+
+
+if __name__ == '__main__':
+ conn = connect_to_db()
+ curs = conn.cursor()
+
+ total_characters = execute_query(curs, GET_TOTAL_CHARACTERS)
+ print('total characters', total_characters)
+
+ number_cleric = execute_query(curs, GET_CHARACTERS_CLERIC)
+ number_fighter = execute_query(curs, GET_CHARACTERS_FIGHTER)
+ print('total cleric', number_cleric, 'total fighter', number_fighter)
+ number_mage = execute_query(curs, GET_CHARACTERS_MAGE)
+ number_necromancer = execute_query(curs, GET_CHARACTERS_NECROMANCER)
+ print('total mage', number_mage, 'total necromancer', number_necromancer)
+ number_thief = execute_query(curs, GET_CHARACTERS_THIEF)
+ print('total thief', number_thief)
+
+ total_items = execute_query(curs, GET_TOTAL_ITEMS)
+ print('total items', total_items)
+
+ total_weapons = execute_query(curs, GET_WEAPONS)
+ print('total weapons', total_weapons)
+
+ non_weapons = execute_query(curs, GET_NON_WEAPONS)
+ print('non weapons', non_weapons)
+
+ items_per_character = execute_query(curs, GET_ITEMS_PER_CHARACTER)
+ print('items per character', items_per_character)
+
+ weapons_per_character = execute_query(curs, GET_WEAPONS_PER_CHARACTER)
+ print('weapons per character', weapons_per_character)
+
+ average_items_per_character = execute_query(curs, AVG_ITEMS_PER_CHARACTER)
+ print('average items per character', average_items_per_character)
+
+ average_weapons_per_character = execute_query(curs, AVG_WEAPONS_PER_CHARACTER)
+ print('average weapons_per_character', average_weapons_per_character)
diff --git a/module2-sql-for-analysis/.idea/.gitignore b/module2-sql-for-analysis/.idea/.gitignore
new file mode 100644
index 00000000..73f69e09
--- /dev/null
+++ b/module2-sql-for-analysis/.idea/.gitignore
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
+# Editor-based HTTP Client requests
+/httpRequests/
diff --git a/module2-sql-for-analysis/.idea/inspectionProfiles/profiles_settings.xml b/module2-sql-for-analysis/.idea/inspectionProfiles/profiles_settings.xml
new file mode 100644
index 00000000..105ce2da
--- /dev/null
+++ b/module2-sql-for-analysis/.idea/inspectionProfiles/profiles_settings.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/module2-sql-for-analysis/.idea/misc.xml b/module2-sql-for-analysis/.idea/misc.xml
new file mode 100644
index 00000000..8f632a08
--- /dev/null
+++ b/module2-sql-for-analysis/.idea/misc.xml
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/module2-sql-for-analysis/.idea/module2-sql-for-analysis.iml b/module2-sql-for-analysis/.idea/module2-sql-for-analysis.iml
new file mode 100644
index 00000000..d0876a78
--- /dev/null
+++ b/module2-sql-for-analysis/.idea/module2-sql-for-analysis.iml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/module2-sql-for-analysis/.idea/modules.xml b/module2-sql-for-analysis/.idea/modules.xml
new file mode 100644
index 00000000..360a6d65
--- /dev/null
+++ b/module2-sql-for-analysis/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/module2-sql-for-analysis/.idea/sqldialects.xml b/module2-sql-for-analysis/.idea/sqldialects.xml
new file mode 100644
index 00000000..444ae2fc
--- /dev/null
+++ b/module2-sql-for-analysis/.idea/sqldialects.xml
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/module2-sql-for-analysis/.idea/vcs.xml b/module2-sql-for-analysis/.idea/vcs.xml
new file mode 100644
index 00000000..6c0b8635
--- /dev/null
+++ b/module2-sql-for-analysis/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/module2-sql-for-analysis/ETL.py b/module2-sql-for-analysis/ETL.py
new file mode 100644
index 00000000..cfe4f272
--- /dev/null
+++ b/module2-sql-for-analysis/ETL.py
@@ -0,0 +1,120 @@
+# ETL - Extract Transform Load
+# Step1 - Extract, get data out of SQLITE
+# Lets focus on character data
+
+import sqlite3
+import psycopg2
+
+dbname = 'ppezxvjc'
+user = 'ppezxvjc'
+password = 't0tlBYAiZvucD-MTqJAG2SPT87DZbVnS' # Don't commit
+host = 'isilo.db.elephantsql.com'
+
+pg_conn = psycopg2.connect(dbname=dbname, user=user, password=password, host=host)
+pg_curs = pg_conn.cursor()
+
+sl_conn = sqlite3.connect('rpg_db.sqlite3')
+sl_curs = sl_conn.cursor()
+
+get_characters = "SELECT * FROM charactercreator_character;"
+sl_curs.execute(get_characters)
+characters = sl_curs.fetchall()
+print(len(characters))
+
+# Slice the first five rows
+print(f'characters {characters[:5]}')
+
+# Step 1 complete, we have a tuple with all our character data
+
+# Step 2 - Transform
+# Our goal is to make a schema to define a table that fits the data
+sl_curs.execute('PRAGMA table_info(charactercreator_character);')
+print(sl_curs.fetchall())
+
+create_character_table = """
+CREATE TABLE if not exists charactercreator_character (
+ character_id SERIAL PRIMARY KEY,
+ name VARCHAR(30),
+ level INT,
+ exp INT,
+ hp INT,
+ strength INT,
+ intelligence INT,
+ dexterity INT,
+ wisdom INT
+);
+"""
+
+
+# Defining a function to refresh connection and cursor
+def refresh_connection_and_cursor(conn, curs):
+ curs.close()
+ conn.close()
+ pg_conn = psycopg2.connect(dbname=dbname, user=user,
+ password=password, host=host)
+ pg_curs = pg_conn.cursor()
+ return pg_conn, pg_curs
+
+
+pg_conn, pg_curs = refresh_connection_and_cursor(pg_conn, pg_curs)
+print(create_character_table)
+
+# Execute the created table
+pg_curs.execute(create_character_table)
+pg_conn.commit()
+
+# PostgreSQL comparison to the SQLite pragma
+# We can query tables if we want to check
+# This is a clever optional thing, showing postgresql internals
+show_tables = """
+SELECT
+ *
+FROM
+ pg_catalog.pg_tables
+WHERE
+ schemaname != 'pg_catalog'
+AND schemaname != 'information_schema';
+"""
+pg_curs.execute(show_tables)
+print(pg_curs.fetchall())
+
+# Done with step 2 (transform)
+# Step 3 - Load!
+print(characters[0])
+print(characters[0][1:])
+
+example_insert = """
+INSERT INTO charactercreator_character
+(name, level, exp, hp, strength, intelligence, dexterity, wisdom)
+VALUES """ + str(characters[0][1:]) + ";"
+
+print(example_insert)
+
+for character in characters:
+ insert_character = """
+ INSERT INTO charactercreator_character
+ (name, level, exp, hp, strength, intelligence, dexterity, wisdom)
+ VALUES """ + str(character[1:]) + ";"
+ pg_curs.execute(insert_character)
+
+pg_conn.commit()
+
+# Let's look at what we've done
+pg_curs.execute('SELECT * FROM charactercreator_character LIMIT 5;')
+print(pg_curs.fetchall())
+
+# Now the data looks the same! But let's check it systematically
+pg_curs.execute('SELECT * FROM charactercreator_character;')
+pg_characters = pg_curs.fetchall()
+
+# We could do more spot checks, but let's loop and check them all
+# TODO/afternoon task - consider making this a more formal test
+for character, pg_character in zip(characters, pg_characters):
+ assert character == pg_character
+
+# No complaints - which means they're all the same!
+# Closing out cursor/connection to wrap up
+pg_curs.close()
+pg_conn.close()
+sl_curs.close()
+sl_conn.close()
diff --git a/module2-sql-for-analysis/Insert_Titanic.py b/module2-sql-for-analysis/Insert_Titanic.py
new file mode 100644
index 00000000..4843705a
--- /dev/null
+++ b/module2-sql-for-analysis/Insert_Titanic.py
@@ -0,0 +1,122 @@
+import sqlite3
+import psycopg2
+
+# Establish Connection W/ DataBase
+dbname = 'ppezxvjc'
+user = 'ppezxvjc'
+password = 't0tlBYAiZvucD-MTqJAG2SPT87DZbVnS' # Don't commit
+host = 'isilo.db.elephantsql.com'
+
+pg_conn = psycopg2.connect(dbname=dbname, user=user, password=password, host=host)
+pg_curs = pg_conn.cursor()
+
+# Get Data Out of SQLite
+sl_conn = sqlite3.connect('Titanic3.db')
+sl_curs = sl_conn.cursor()
+
+# Extract All Desired Observations/Rows
+get_people = "SELECT * FROM TITANIC"
+sl_curs.execute(get_people)
+people = sl_curs.fetchall()
+# Check Length
+print(len(people))
+
+# Print first 5 Rows
+print(f'people {people[:5]}')
+
+# Step1 - Completed, we have data in people
+
+# Step2 - Transform
+# Goal is to make a schema to define a table that fits the data
+sl_curs.execute('PRAGMA table_info(TITANIC);')
+print(sl_curs.fetchall())
+
+create_TITANIC_table = """
+DROP TABLE if exists TITANIC;
+CREATE TABLE if not exists TITANIC (
+ Survived INT ,
+ Pclass INT,
+ Name VARCHAR(100),
+ Sex VARCHAR(30),
+ Age FLOAT,
+ Siblings_Spouses INT,
+ Parents_Children INT,
+ Fare FLOAT
+ );
+ """
+
+
+# Defining a function to refresh connection and cursor
+def refresh_connection_and_cursor(conn, curs):
+ curs.close()
+ conn.close()
+ pg_conn = psycopg2.connect(dbname=dbname, user=user,
+ password=password, host=host)
+ pg_curs = pg_conn.cursor()
+ return pg_conn, pg_curs
+
+
+pg_conn, pg_curs = refresh_connection_and_cursor(pg_conn, pg_curs)
+pg_curs.execute(create_TITANIC_table)
+pg_conn.commit()
+
+pg_curs.execute('DROP TABLE if exists TITANIC3,TITANIC4,TITANIC5,TITANIC6')
+pg_conn.commit()
+
+# PostgreSQL comparison to the SQLite pragma
+# We can query tables if we want to check
+# This is a clever optional thing, showing postgresql internals
+show_tables = """
+SELECT
+ *
+FROM
+ pg_catalog.pg_tables
+WHERE
+ schemaname != 'pg_catalog'
+AND schemaname != 'information_schema';
+"""
+pg_curs.execute(show_tables)
+print('tables', pg_curs.fetchall())
+
+# Done with step 2 (transform)
+# Step 3 - Load!
+print(people[0])
+print(people[0][1:])
+example_insert = """
+INSERT INTO TITANIC
+(Survived, Pclass, Name, Sex, Age, Siblings_Spouses, Parents_Children,Fare)
+VALUES """ + str(people[0][0:]) + ";"
+
+for person in people:
+ if "'" in person[2]:
+ person = list(person)
+ person[2] = person[2].replace("'", "")
+ person = tuple(person)
+ insert_person = """
+ INSERT INTO TITANIC
+ (Survived, Pclass,Name, Sex, Age, Siblings_Spouses, Parents_Children, Fare)
+ VALUES """ + str(person[0:]) + ";"
+ pg_curs.execute(insert_person)
+
+pg_conn.commit()
+
+# Let's look at what we've done first
+pg_curs.execute('SELECT * FROM TITANIC LIMIT 10;')
+print(pg_curs.fetchall())
+
+# Now the data looks the same! But let's check it systematically
+pg_curs.execute('SELECT * FROM TITANIC;')
+pg_people = pg_curs.fetchall()
+
+# TESTS
+# We could do more spot checks, but let's loop and check them all
+# # TODO/afternoon task - consider making this a more formal test
+# for person, pg_person in zip(people, pg_people):
+# assert person == pg_person
+
+# No complaints - which means they're all the same!
+# Closing out cursor/connection to wrap up
+pg_curs.close()
+pg_conn.close()
+sl_curs.close()
+sl_conn.close()
diff --git a/module2-sql-for-analysis/Pipfile b/module2-sql-for-analysis/Pipfile
new file mode 100644
index 00000000..b723d019
--- /dev/null
+++ b/module2-sql-for-analysis/Pipfile
@@ -0,0 +1,11 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+
+[packages]
+
+[requires]
+python_version = "3.7"
diff --git a/module2-sql-for-analysis/Pipfile.lock b/module2-sql-for-analysis/Pipfile.lock
new file mode 100644
index 00000000..9a51a282
--- /dev/null
+++ b/module2-sql-for-analysis/Pipfile.lock
@@ -0,0 +1,20 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "7e7ef69da7248742e869378f8421880cf8f0017f96d94d086813baa518a65489"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {}
+}
diff --git a/module2-sql-for-analysis/PostGreSQL.py b/module2-sql-for-analysis/PostGreSQL.py
new file mode 100644
index 00000000..ca43f5bc
--- /dev/null
+++ b/module2-sql-for-analysis/PostGreSQL.py
@@ -0,0 +1,40 @@
+import psycopg2
+
+dir(psycopg2)  # list what the module exposes
+
+dbname = 'ppezxvjc'
+user = 'ppezxvjc'
+password = 't0tlBYAiZvucD-MTqJAG2SPT87DZbVnS' # Don't commit
+host = 'isilo.db.elephantsql.com'
+
+pg_conn = psycopg2.connect(dbname=dbname, user=user, password=password, host=host)
+
+pg_curs = pg_conn.cursor()
+# help(pg_curs.execute)
+
+# We're connected; let's see what's in the db
+pg_curs.execute('SELECT * FROM test_table;')
+print(pg_curs.fetchall())
+
+# Add another insert statement
+insert_statement = """
+INSERT INTO test_table (name, data) VALUES
+(
+ 'pokemon',
+ '{"key": "value","key2" : 2}'::JSONB
+) ;
+
+"""
+
+pg_curs.execute(insert_statement)
+pg_conn.commit()
+
+pg_curs.execute('SELECT * FROM test_table;')
+print(pg_curs.fetchall())
+
+# closing cursor
+pg_curs.close()
+# pg_conn.close() # If we were really done
+
diff --git a/module2-sql-for-analysis/Titanic.db b/module2-sql-for-analysis/Titanic.db
new file mode 100644
index 00000000..2a1bbafa
Binary files /dev/null and b/module2-sql-for-analysis/Titanic.db differ
diff --git a/module2-sql-for-analysis/Titanic1.db b/module2-sql-for-analysis/Titanic1.db
new file mode 100644
index 00000000..e69de29b
diff --git a/module2-sql-for-analysis/Titanic2.db b/module2-sql-for-analysis/Titanic2.db
new file mode 100644
index 00000000..5da5c1ee
Binary files /dev/null and b/module2-sql-for-analysis/Titanic2.db differ
diff --git a/module2-sql-for-analysis/Titanic3.db b/module2-sql-for-analysis/Titanic3.db
new file mode 100644
index 00000000..2a1bbafa
Binary files /dev/null and b/module2-sql-for-analysis/Titanic3.db differ
diff --git a/module2-sql-for-analysis/TitanicDF_to_SQL.py b/module2-sql-for-analysis/TitanicDF_to_SQL.py
new file mode 100644
index 00000000..a014a725
--- /dev/null
+++ b/module2-sql-for-analysis/TitanicDF_to_SQL.py
@@ -0,0 +1,27 @@
+import pandas as pd
+import sqlite3
+
+pd.set_option('display.max_columns', 500)
+
+# Create data frame
+df = pd.read_csv('titanic.csv')
+df = df.rename(columns={'Siblings/Spouses Aboard': 'Siblings_Spouses', 'Parents/Children Aboard': 'Parents_Children'})
+print(df.head())
+print(df.info())
+print(df.describe())
+# Create Table
+conn = sqlite3.connect('Titanic3.db')
+c = conn.cursor()
+# This is SQLite; the data types may differ in PostgreSQL
+# (e.g. SQLite stores strings as TEXT/CHAR, while PostgreSQL uses VARCHAR)
+c.execute(
+ 'CREATE TABLE IF NOT EXISTS TITANIC (Survived INT, Pclass number, Name text,'
+ ' Sex text, Age float, Siblings_Spouses number, Parents_Children number, Fare float)')
+
+df.to_sql('TITANIC', conn, if_exists='replace', index=False)
+
+c.execute("SELECT * FROM TITANIC;")
+
+for row in c.fetchall():
+ print(row)
diff --git a/module2-sql-for-analysis/rpg_db.sqlite3 b/module2-sql-for-analysis/rpg_db.sqlite3
new file mode 100644
index 00000000..837d7f16
Binary files /dev/null and b/module2-sql-for-analysis/rpg_db.sqlite3 differ
diff --git a/module3-nosql-and-document-oriented-databases/.DS_Store b/module3-nosql-and-document-oriented-databases/.DS_Store
new file mode 100644
index 00000000..e6217005
Binary files /dev/null and b/module3-nosql-and-document-oriented-databases/.DS_Store differ
diff --git a/module3-nosql-and-document-oriented-databases/.idea/.gitignore b/module3-nosql-and-document-oriented-databases/.idea/.gitignore
new file mode 100644
index 00000000..73f69e09
--- /dev/null
+++ b/module3-nosql-and-document-oriented-databases/.idea/.gitignore
@@ -0,0 +1,8 @@
+# Default ignored files
+/shelf/
+/workspace.xml
+# Datasource local storage ignored files
+/dataSources/
+/dataSources.local.xml
+# Editor-based HTTP Client requests
+/httpRequests/
diff --git a/module3-nosql-and-document-oriented-databases/.idea/inspectionProfiles/Project_Default.xml b/module3-nosql-and-document-oriented-databases/.idea/inspectionProfiles/Project_Default.xml
new file mode 100644
index 00000000..5cb71ef0
--- /dev/null
+++ b/module3-nosql-and-document-oriented-databases/.idea/inspectionProfiles/Project_Default.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/module3-nosql-and-document-oriented-databases/.idea/inspectionProfiles/profiles_settings.xml b/module3-nosql-and-document-oriented-databases/.idea/inspectionProfiles/profiles_settings.xml
new file mode 100644
index 00000000..105ce2da
--- /dev/null
+++ b/module3-nosql-and-document-oriented-databases/.idea/inspectionProfiles/profiles_settings.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/module3-nosql-and-document-oriented-databases/.idea/misc.xml b/module3-nosql-and-document-oriented-databases/.idea/misc.xml
new file mode 100644
index 00000000..4e7f49c3
--- /dev/null
+++ b/module3-nosql-and-document-oriented-databases/.idea/misc.xml
@@ -0,0 +1,4 @@
+
+
+
+
\ No newline at end of file
diff --git a/module3-nosql-and-document-oriented-databases/.idea/module3-nosql-and-document-oriented-databases.iml b/module3-nosql-and-document-oriented-databases/.idea/module3-nosql-and-document-oriented-databases.iml
new file mode 100644
index 00000000..e2cad7f9
--- /dev/null
+++ b/module3-nosql-and-document-oriented-databases/.idea/module3-nosql-and-document-oriented-databases.iml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/module3-nosql-and-document-oriented-databases/.idea/modules.xml b/module3-nosql-and-document-oriented-databases/.idea/modules.xml
new file mode 100644
index 00000000..223865fd
--- /dev/null
+++ b/module3-nosql-and-document-oriented-databases/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/module3-nosql-and-document-oriented-databases/.idea/vcs.xml b/module3-nosql-and-document-oriented-databases/.idea/vcs.xml
new file mode 100644
index 00000000..6c0b8635
--- /dev/null
+++ b/module3-nosql-and-document-oriented-databases/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/module3-nosql-and-document-oriented-databases/Pipfile b/module3-nosql-and-document-oriented-databases/Pipfile
new file mode 100644
index 00000000..b723d019
--- /dev/null
+++ b/module3-nosql-and-document-oriented-databases/Pipfile
@@ -0,0 +1,11 @@
+[[source]]
+name = "pypi"
+url = "https://pypi.org/simple"
+verify_ssl = true
+
+[dev-packages]
+
+[packages]
+
+[requires]
+python_version = "3.7"
diff --git a/module3-nosql-and-document-oriented-databases/Pipfile.lock b/module3-nosql-and-document-oriented-databases/Pipfile.lock
new file mode 100644
index 00000000..9a51a282
--- /dev/null
+++ b/module3-nosql-and-document-oriented-databases/Pipfile.lock
@@ -0,0 +1,20 @@
+{
+ "_meta": {
+ "hash": {
+ "sha256": "7e7ef69da7248742e869378f8421880cf8f0017f96d94d086813baa518a65489"
+ },
+ "pipfile-spec": 6,
+ "requires": {
+ "python_version": "3.7"
+ },
+ "sources": [
+ {
+ "name": "pypi",
+ "url": "https://pypi.org/simple",
+ "verify_ssl": true
+ }
+ ]
+ },
+ "default": {},
+ "develop": {}
+}
diff --git a/module3-nosql-and-document-oriented-databases/WorkingWMongo/Mongo.py b/module3-nosql-and-document-oriented-databases/WorkingWMongo/Mongo.py
new file mode 100644
index 00000000..259d4e11
--- /dev/null
+++ b/module3-nosql-and-document-oriented-databases/WorkingWMongo/Mongo.py
@@ -0,0 +1,78 @@
+import pymongo
+password = 'suh264tUm'
+dbname = 'test'
+connection = ('mongodb+srv://jonatan5696:' + password +
+ '@cluster0.jalzo.gcp.mongodb.net/' + dbname +
+ '?retryWrites=true&w=majority')
+client = pymongo.MongoClient(connection)
+db = client.test
+print(db)
+print('\n')
+curs = db.test.find({'x': 1})
+print(f'list of curs is, {list(curs)}\n')
+
+# Let's add some more interesting documents
+
+byrnes_doc = {
+ 'animal': 'manatee',
+ 'color': 'green',
+ 'number': 7
+}
+
+daves_doc = {
+ 'animal': 'bat',
+ 'color': 'red',
+ 'number': 1000
+}
+
+sasanas_doc = {
+ 'animal': 'orca',
+ 'color': 'blue',
+ 'number': 9
+}
+
+tylers_doc = {
+ 'animal': 'hippogryph',
+ 'cities': ['New York', 'Houston']
+}
+
+walters_doc = {
+ 'color': 'chartreuse',
+ 'animal': 'platypus'
+}
+
+aarons_doc = {
+ 'inner_dict': {
+ 'x': 2,
+ 'y': -4,
+ 'z': 'banana'
+ },
+ 'another_key': (2, 6, 3)
+}
+
+# Let's put them all in a list for convenience
+all_docs = [byrnes_doc, daves_doc, sasanas_doc, tylers_doc, walters_doc,
+ aarons_doc]
+
+print(f'len of docs {len(all_docs)}\n')
+
+# Insert Documents Back Into Data Base
+db.test.insert_many(all_docs)
+
+# db.test.insert_one({
+# 'animal': 'tiger',
+# 'color': 'green',
+# 'city': 'Paris'
+# })
+
+# Look for Documents
+print(f'looking for docs: {list(db.test.find())}')
+
+print('not in list form', db.test.find_one({'color': 'green'}))
+print('in list form', list(db.test.find({'color': 'green'})))
+
+more_docs = []
+for i in range(10):
+ doc = {'even': i % 2 == 0}
+ doc['value'] = i
+ more_docs.append(doc)
diff --git a/module3-nosql-and-document-oriented-databases/WorkingWMongo/Mongo1.py b/module3-nosql-and-document-oriented-databases/WorkingWMongo/Mongo1.py
new file mode 100644
index 00000000..8797ac03
--- /dev/null
+++ b/module3-nosql-and-document-oriented-databases/WorkingWMongo/Mongo1.py
@@ -0,0 +1,34 @@
+import pymongo
+import sqlite3
+
+password = 'suh264tUm'
+dbname = 'character_data_base'
+connection = ('mongodb+srv://jonatan5696:' + password +
+ '@cluster0.jalzo.gcp.mongodb.net/' + dbname +
+ '?retryWrites=true&w=majority')
+client = pymongo.MongoClient(connection)
+db = client.character_data
+
+# Step 1 - Extract, getting data out of SQlite3
+conn = sqlite3.connect('rpg_db.sqlite3')
+curs = conn.cursor()
+
+# goal is to copy the charactercreator_character table
+get_characters = 'SELECT * FROM charactercreator_character;'
+characters = curs.execute(get_characters).fetchall()
+print(characters)
+# character_observations = []
+# for char in characters:
+# get_rpg_characters = {'Character_id':char[0],
+# 'name':char[1],
+# 'level':char[2],
+# 'exp':char[3],
+# 'hp': char[4],
+# 'strength': char[5],
+# 'intelligence': char[6],
+# 'dexterity': char[7],
+# 'wisdom': char[8]}
+# character_observations.append(get_rpg_characters)
+# db.character_data.insert_many(character_observations)
+
+print(db.character_data.find_one({'name':'Minus c'}))
\ No newline at end of file
diff --git a/module3-nosql-and-document-oriented-databases/WorkingWMongo/rpg_db.sqlite3 b/module3-nosql-and-document-oriented-databases/WorkingWMongo/rpg_db.sqlite3
new file mode 100644
index 00000000..837d7f16
Binary files /dev/null and b/module3-nosql-and-document-oriented-databases/WorkingWMongo/rpg_db.sqlite3 differ