{ "cells": [ { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import hashlib\n", "\n", "# Input string\n", "text = \"22/05/2024\" + \"IFD YADDA YADDA\" + \"58,00\" + \"0,00\"\n", "\n", "# Create SHA-256 hash\n", "sha256_hash = hashlib.sha256(text.encode()).hexdigest()\n", "print(\"SHA-256 Hash:\", sha256_hash)\n", "\n", "# MD5 hash (128-bit)\n", "md5_hash = hashlib.md5(text.encode()).hexdigest()\n", "print(\"MD5 Hash:\", md5_hash)\n", "\n", "# SHA-1 hash (160-bit)\n", "sha1_hash = hashlib.sha1(text.encode()).hexdigest()\n", "print(\"SHA-1 Hash:\", sha1_hash)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "test_file = 'OUROCARD_VISA_INFINITE-Ago_24.txt'" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "with open('OUROCARD_VISA_INFINITE-Ago_24.txt', 'r') as reader:\n", " data = reader.read()\n", " print(data)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import re\n", "\n", "# Open the text file\n", "with open('OUROCARD_VISA_INFINITE-Ago_24.txt', 'r') as file_name:\n", " # Read the contents of the file\n", " contents = file_name.read()\n", "\n", "# Define the regex pattern to match\n", "pattern = r'\\d{2}\\.\\d{2}\\.\\d{4}.{23}.{14}.{2}\\s*\\d+,\\d{2}\\s*\\d+,\\d{2}'\n", "\n", "# Iterate over the lines that match the pattern\n", "for matches in re.finditer(pattern, contents):\n", " print(matches.group())" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ " 1 - DANIEL\n", " 4 - IZABELY\n" ] } ], "source": [ "import re\n", "\n", "# Open the text file\n", "with open('./documents/OUROCARD_VISA_INFINITE-Ago_24.txt', 'r', encoding=\"latin\") as file_name:\n", " # Read the contents of the file\n", " contents = file_name.read()\n", "\n", "# Define the regex pattern to match\n", "pattern = 
r\"\\s\\d\\s?-\\s?([A-Z]+)\"\n", "\n", "# Iterate over the lines that match the pattern\n", "for matches in re.finditer(pattern, contents):\n", " print(matches.group())" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import re\n", "\n", "# Open the text file\n", "with open('OUROCARD_VISA_INFINITE-Ago_24.txt', 'r') as file_name:\n", " # Read the contents of the file\n", " contents = file_name.read()\n", "\n", "# Define the regex patterns\n", "dan_pattern = r'*DANIEL.*'\n", "iza_pattern = r'.*IZABELY.*'\n", "line_pattern = r'\\d{2}\\.\\d{2}\\.\\d{4}.{23}.{14}.{2}\\s*\\d+,\\d{2}\\s*\\d+,\\d{2}'\n", "\n", "# Iterate over the lines that match the pattern\n", "for matches in re.finditer(line_pattern, contents):\n", " print(matches.group())\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# Open the text file\n", "with open('table-test.txt', 'r') as file_name:\n", " # Read the contents of the file\n", " contents = file_name.readlines()\n", "\n", "# Initialize lists to store the lines under each table\n", "table_a_lines = []\n", "table_b_lines = []\n", "\n", "# Flag to determine which table section we are in\n", "current_table = None\n", "\n", "# Iterate over the lines in the file\n", "for line in contents:\n", " line = line.strip() # Remove leading and trailing whitespace\n", "\n", " # Check for TABLEA and TABLEB\n", " if line == 'TABLEA':\n", " current_table = 'TABLEA'\n", " elif line == 'TABLEB':\n", " current_table = 'TABLEB'\n", " else:\n", " # Add lines to the appropriate list based on the current table\n", " if current_table == 'TABLEA':\n", " table_a_lines.append(line)\n", " elif current_table == 'TABLEB':\n", " table_b_lines.append(line)\n", "\n", "# Print the results\n", "print('Lines under TABLEA:')\n", "for data in table_a_lines:\n", " print(data)\n", "\n", "print('\\nLines under TABLEB:')\n", "for data in table_b_lines:\n", " print(data)" ] }, { "cell_type": 
"code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import re\n", "from datetime import date, datetime\n", "import locale\n", "\n", "locale.setlocale(locale.LC_ALL, 'pt_BR.UTF-8')\n", "\n", "# Open the text file\n", "with open('OUROCARD_VISA_INFINITE-Ago_24.txt', 'r', encoding='latin') as file_name:\n", " # Read the contents of the file\n", " contents = file_name.readlines()\n", "\n", "# Define the regex patterns\n", "dan_pattern = r'1 - DANIEL.*'\n", "iza_pattern = r'4 - IZABELY.*'\n", "line_pattern = r'\\d{2}\\.\\d{2}\\.\\d{4}.{23}.{14}.{2}\\s*\\d+,\\d{2}\\s*\\d+,\\d{2}'\n", "line_group_pattern = r'(\\d{2})\\.(\\d{2})\\.(\\d{4})(.{23})(.{14})(.{2})(\\s*\\d+,\\d{2})(\\s*\\d+,\\d{2})'\n", "\n", "# Lists\n", "list_dan = []\n", "list_iza = []\n", "current_list = None\n", "\n", "insert_bulk = []\n", "\n", "# Iterate all lines\n", "for line in contents:\n", " line = line.strip()\n", " if re.match(dan_pattern, line):\n", " current_list = 'list_dan'\n", " print('found Dan')\n", " elif re.match(iza_pattern, line):\n", " current_list = 'list_iza'\n", " print('found Iza')\n", " else:\n", " if re.match(line_pattern, line):\n", " if current_list == 'list_dan':\n", " print(\"dan\", line)\n", " list_dan.append(line)\n", " if current_list == 'list_iza':\n", " print(\"iza\", line)\n", " list_iza.append(line)\n", "\n", "print('list_dan - tuples for insert')\n", "for item in list_dan:\n", " matches = re.search(line_group_pattern, item)\n", " tTdate = str(date(int(matches.group(3)), int(matches.group(2)), int(matches.group(1))))\n", " tAccount = 1\n", " tMemo = matches.group(4)\n", " tCity = matches.group(5)\n", " tCountry = matches.group(6)\n", " tOutflow = matches.group(7).strip().replace(',', '.')\n", " tInflow = matches.group(8).strip().replace(',', '.')\n", " tOwner = 1\n", " tInstallments = 1\n", " tCreated = str(datetime.now(tz=None))\n", " tUpdated = None\n", " insert_bulk.append(( tTdate, tAccount, tMemo, tCity, tCountry, tOutflow, tInflow, tOwner, 
tInstallments, tCreated, tUpdated ))\n", "\n", "print('list_dan - tuples for insert')\n", "for item in list_iza:\n", " matches = re.search(line_group_pattern, item)\n", " tTdate = str(date(int(matches.group(3)), int(matches.group(2)), int(matches.group(1))))\n", " tAccount = 1\n", " tMemo = matches.group(4)\n", " tCity = matches.group(5)\n", " tCountry = matches.group(6)\n", " tOutflow = matches.group(7).strip().replace(',', '.')\n", " tInflow = matches.group(8).strip().replace(',', '.')\n", " tOwner = 2\n", " tInstallments = 1\n", " tCreated = str(datetime.now(tz=None))\n", " tUpdated = None\n", " insert_bulk.append(( tTdate, tAccount, tMemo, tCity, tCountry, tOutflow, tInflow, tOwner, tInstallments, tCreated, tUpdated ))\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def logic_test(input: str = None):\n", " yadda = \"yadda\"\n", " return input or yadda\n", "\n", "logic_test()" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "dictTest = {\n", " \"owner1\": {\n", " \"owner_label\": \"foo\",\n", " \"owner_id\": 1,\n", " \"list1\": [\"thingies, thingies, 42\"],\n", " },\n", " \"owner2\": {\n", " \"owner_label\": \"bar\",\n", " \"owner_id\": 2,\n", " \"list1\": [\"thingies, thingies, 42\"],\n", " },\n", "}\n", "\n", "for owner in dictTest:\n", " print(dictTest[owner][\"owner_id\"], dictTest[owner][\"owner_label\"])\n", " for item in dictTest[owner][\"list1\"]:\n", " print(item)\n", "\n", "dictTest[\"owner1\"][\"owner_label\"] = \"yadda\"\n", "\n", "for owner in dictTest:\n", " print(dictTest[owner][\"owner_id\"], dictTest[owner][\"owner_label\"])\n", " for item in dictTest[owner][\"list1\"]:\n", " print(item)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "param1 = \"foo\"\n", "param2 = \"bar\"\n", "testy = {}\n", "testy[param1] = {}\n", "testy[param1][param2] = [\"what\", \"when\", \"why\"]\n", 
"testy[param1][\"number\"] = 1\n", "\n", "print(testy)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "bigDict = {\n", " \"list_daniel\": {\n", " \"owner_name\": \"DANIEL\",\n", " \"owner_id\": 1,\n", " \"tlist\": [\n", " \"25.07.2024PGTO DEBITO CONTA 8611 000006025 200211 -24.420,24 0,00\",\n", " \"15.07.2024NEW EMPAR EMPREENDIMENTCORUMBA DE GO BR 10,00 0,00\",\n", " \"14.07.2024NAZO SUSHI BAR BRASILIA BR 446,22 0,00\",\n", " \"18.07.2024IFD*IFOOD.COM AGENCIA DOsasco BR 12,90 0,00\",\n", " \"19.07.2024IFD*RC MELO COMERCIO D BRASILIA BR 138,30 0,00\",\n", " \"21.07.2024GRUPO FARTURA DE HORTI BRASILIA BR 105,79 0,00\",\n", " \"20.07.2024IFD*JL COMERCIO VAREJISBRASILIA BR 134,70 0,00\",\n", " \"22.07.2024MURAKAMI BRASILIA BR 55,90 0,00\",\n", " \"22.07.2024ZP *CANTINAGOODLANCHEV Brasilia BR 8,40 0,00\",\n", " \"23.07.2024CANTINA E CIA BRASILIA BR 10,00 0,00\",\n", " \"25.07.2024CANTINA E CIA BRASILIA BR 3,50 0,00\",\n", " \"24.07.2024IFD*NFE COMERCIO DE ALIBRASILIA BR 101,89 0,00\",\n", " \"27.07.2024BENITA PANINOTECA BRASILIA BR 156,80 0,00\",\n", " \"26.07.2024IFD*BSQUARE PIZZA BURGEBRASILIA BR 123,99 0,00\",\n", " \"04.08.2024IFD*RC MELO COMERCIO DEBRASILIA BR 103,29 0,00\",\n", " \"06.08.2024CANTINA E CIA BRASILIA BR 6,00 0,00\",\n", " \"06.08.2024ZP *CANTINAGOODLANCHEV Brasilia BR 210,00 0,00\",\n", " \"07.08.2024IFD*BSQUARE PIZZA BURGEBRASILIA BR 232,99 0,00\",\n", " \"22.07.2024DROGASIL 2067 BRASILIA BR 204,99 0,00\",\n", " \"22.07.2024REDE BRASIL DRUGSTORE BRASILIA BR 26,74 0,00\",\n", " \"10.08.2024PAGUE MENOS 1225 BRASILIA BR 406,30 0,00\",\n", " \"13.07.2024PAG*EduardoMeireles AGUAS LINDAS BR 14,00 0,00\",\n", " \"13.07.2024MR JOHN BARBEARIA LTDA BRASILIA BR 60,00 0,00\",\n", " \"18.07.2024NETFLIX.COM SAO PAULO BR 44,90 0,00\",\n", " \"19.07.2024PAG*FolhaDeSPaulo SAO PAULO BR 29,90 0,00\",\n", " \"19.07.2024IFD*CR EXPRESS Osasco BR 10,00 0,00\",\n", " \"22.07.2024APPLE.COM/BILL SAO PAULO BR 97,90 
0,00\",\n", " \"22.07.2024MERCADOLIVRE*MERCADOLIVOSASCO BR 444,40 0,00\",\n", " \"25.07.2024MP*5PRODUTOS OSASCO BR 308,93 0,00\",\n", " \"25.07.2024MERCADOLIVRE*SABORESDAMOSASCO BR 258,00 0,00\",\n", " \"27.07.2024MP*MELIMAIS OSASCO BR 17,99 0,00\",\n", " \"27.07.2024Wellhub Gympass BR GympSao Paulo BR 399,90 0,00\",\n", " \"26.07.2024IFD*CR EXPRESS Osasco BR 10,00 0,00\",\n", " \"28.07.2024SAMS CLUB BRASI 4929 BRASILIA BR 2.256,92 0,00\",\n", " \"29.07.2024MG LAVA JATO BRASILIA BR 100,00 0,00\",\n", " \"29.07.2024MERCADOLIVRE*GLDECOR OSASCO BR 50,75 0,00\",\n", " \"01.08.2024MR JOHN BARBEARIA LTDA BRASILIA BR 54,00 0,00\",\n", " \"04.08.2024APPLE.COM/BILL SAO PAULO BR 54,90 0,00\",\n", " \"04.08.2024IFD*SCORPIONS EXPRESS Osasco BR 10,00 0,00\",\n", " \"08.08.2024CASCOL COMBUSTIVEIS BRASILIA BR 239,37 0,00\",\n", " \"07.08.2024IFD*GRAN LOG EXPRESS Osasco BR 10,00 0,00\",\n", " \"09.08.2024SUPERAUTOR C*Supe NITEROI BR 247,86 0,00\",\n", " \"09.08.2024UBER* TRIP WWW.UBER.COM. BR 30,89 0,00\",\n", " \"09.08.2024UBER *TRIP HELP.UBER.COSAO PAULO BR 10,00 0,00\",\n", " \"09.08.2024UBER * PENDING SAO PAULO BR 27,93 0,00\",\n", " \"09.08.2024UBER *TRIP HELP.UBER.COSAO PAULO BR 5,00 0,00\",\n", " \"15.07.2024VELOE BARUERI BR 22,26 0,00\",\n", " \"19.07.2024POUSADA PIRENEUS RESOR PIRENOPOLIS BR 179,21 0,00\",\n", " \"13.07.2024DL*GOOGLE YouTub SAO PAULO BR 41,90 0,00\",\n", " \"16.07.2024STEAMGAMES.COM 42595229912-1844160 WA 24,00 0,00\",\n", " \"16.07.2024IOF - COMPRA NO EXTERIOR 0,26 0,00\",\n", " \"16.07.2024STEAM PURCHASE SEATTLE DE 127,19 0,00\",\n", " \"18.07.2024IOF - COMPRA NO EXTERIOR 1,39 0,00\",\n", " \"22.07.2024PAG*XsollaGames Sao Paulo BR 26,99 0,00\",\n", " \"11.04.2024PRODUTOS GLOB PARC 04/12 RIO DE JANEIBR 44,90 0,00\",\n", " \"15.01.2024MP*MUNDODOSCO PARC 07/10 SAO PAULO BR 159,90 0,00\",\n", " \"17.05.2024PAG*Folhadesp PARC 03/06 Sao Paulo BR 109,60 0,00\",\n", " \"17.10.2023BIANCHINI AUT PARC 10/10 BRASILIA BR 535,00 0,00\",\n", " \"27.05.2024PARC=112 
BRAS PARC 03/12 BRASILIA BR 452,00 0,00\",\n", " ],\n", " },\n", " \"list_1844160\": {\"owner_name\": \"1844160\", \"owner_id\": 2},\n", " \"list_izabely\": {\n", " \"owner_name\": \"IZABELY\",\n", " \"owner_id\": 3,\n", " \"tlist\": [\n", " \"15.07.2024SALTO CORUMBA CORUMBA DE GO BR 163,85 0,00\",\n", " \"12.07.2024PG *TON MINHACANTINA BRASILIA BR 35,00 0,00\",\n", " \"12.07.2024PASTELARIA VICOSA IV BRASILIA BR 130,00 0,00\",\n", " \"12.07.2024PASTELARIA VICOSA IV BRASILIA BR 19,00 0,00\",\n", " \"13.07.2024CANTINA E CIA BRASILIA BR 4,50 0,00\",\n", " \"18.07.2024MERCADOLIVRE*3PRODUTOS OSASCO BR 362,28 0,00\",\n", " \"20.07.2024COFFEE BIKE CAFES ESPECBRASILIA BR 10,00 0,00\",\n", " \"19.07.2024IFD*ARCOS DOURADOS COMEBRASILIA BR 48,99 0,00\",\n", " \"22.07.2024RESTAURANTE FAROFINA BRASILIA BR 43,90 0,00\",\n", " \"23.07.2024PAG*DiogoLealPimenta BRASILIA BR 183,00 0,00\",\n", " \"28.07.2024TREVISO GALETERIA E P BRASILIA BR 200,20 0,00\",\n", " \"30.07.2024OLINDA COMIDA NORDESTI BRASILIA BR 66,45 0,00\",\n", " \"03.08.2024DULCE PATAGONIA BRASILIA BR 177,90 0,00\",\n", " \"03.08.2024HOT DOG CLUB BRASILIA BR 39,90 0,00\",\n", " \"04.08.2024RESTAURANTE SAO JOAO TERESINA BR 108,00 0,00\",\n", " \"04.08.2024GELATO E GRANO TERESINA BR 24,00 0,00\",\n", " \"05.08.2024F L L MELO LTDA SAO PAULO BR 63,00 0,00\",\n", " \"10.08.2024CREMERIA ITALIANA BRASILIA BR 49,00 0,00\",\n", " \"14.07.2024RITUARIA*Rituaria SAO PAULO BR 448,20 0,00\",\n", " \"24.07.2024BIOEXATA FARMACIA BRASILIA BR 73,70 0,00\",\n", " \"12.07.2024CASCOL COMBUSTIVEIS BRASILIA BR 297,86 0,00\",\n", " \"12.07.2024LIMBER SOFTWARE E CONS SAO LUIZ DO P BR 54,00 0,00\",\n", " \"13.07.2024PAG*EduardoMeireles AGUAS LINDAS BR 22,00 0,00\",\n", " \"13.07.2024PAG*EduardoMeireles AGUAS LINDAS BR 36,00 0,00\",\n", " \"13.07.2024PAG*CidaRommanel BRASILIA BR 30,00 0,00\",\n", " \"13.07.2024ALLPARK EMPREENDIMENTOSGoiania BR 6,00 0,00\",\n", " \"14.07.2024MERCADOLIVRE*CHINALINK OSASCO BR 445,89 0,00\",\n", " \"18.07.2024PG *S 
S MENDES COMERCI MOGI DAS CRUZ BR 150,12 0,00\",\n", " \"19.07.2024MERCADOLIVRE*3PRODUTOS OSASCO BR 276,52 0,00\",\n", " \"03.08.2024WOW*SALE COMERCIO E SE Brasilia BR 165,00 0,00\",\n", " \"07.08.2024PARENTELA PANIFICADORA BRASILIA BR 64,90 0,00\",\n", " \"08.08.2024FranciscoDeAssis BRASILIA BR 36,00 0,00\",\n", " \"09.08.2024BONNAPAN SEU DIA MAIS BRASILIA BR 23,08 0,00\",\n", " \"10.08.2024MP*BRILHODASARTE OSASCO BR 300,00 0,00\",\n", " \"11.07.2024CARREFOUR PL2 338 BRASILIA BR 83,17 0,00\",\n", " \"15.07.2024UBER * PENDING SAO PAULO BR 19,90 0,00\",\n", " \"22.07.2024UBER * PENDING SAO PAULO BR 8,98 0,00\",\n", " \"23.07.2024UBER* TRIP WWW.UBER.COM. BR 8,90 0,00\",\n", " \"24.07.2024UBER* TRIP WWW.UBER.COM. BR 8,98 0,00\",\n", " \"29.07.2024UBER * PENDING SAO PAULO BR 8,94 0,00\",\n", " \"30.07.2024UBER* TRIP WWW.UBER.COM. BR 8,94 0,00\",\n", " \"31.07.2024UBER* TRIP WWW.UBER.COM. BR 8,96 0,00\",\n", " \"06.08.2024UBER* TRIP WWW.UBER.COM. BR 8,99 0,00\",\n", " \"10.08.2024UBER* TRIP WWW.UBER.COM. BR 20,00 0,00\",\n", " \"10.08.2024UBER* TRIP WWW.UBER.COM. 
BR 9,42 0,00\",\n", "            \"10.08.2024UBER * PENDING SAO PAULO BR 13,97 0,00\",\n", "            \"12.07.2024CIDA REIS MODA FITNESS BRASILIA BR 300,00 0,00\",\n", "            \"15.07.2024LANCHONETE SERRA RODO COCALZINHO DE BR 43,00 0,00\",\n", "            \"16.07.2024POUSADA PIRENEUS RESOR PIRENOPOLIS BR 1.704,00 0,00\",\n", "            \"19.07.2024POUSADA PIRENEUS RESOR PIRENOPOLIS BR 1.105,63 0,00\",\n", "            \"09.08.2024HOTEL GOYA P*hote RIO DE JANEIR BR 424,20 0,00\",\n", "            \"12.04.2024LISTO*CLINICA PARC 04/04 BRASILIA BR 2.000,00 0,00\",\n", "            \"17.01.2024BRASILIA EMPR PARC 07/12 BRASILIA BR 599,00 0,00\",\n", "            \"07.01.2024PG *B4A GLAMB PARC 08/12 SAO PAULO BR 74,90 0,00\",\n", "        ],\n", "    },\n", "}\n", "\n", "\n", "for key in bigDict:\n", "    # Not every owner entry has a 'tlist' key (e.g. 'list_1844160'); .get avoids a KeyError\n", "    for item in bigDict[key].get(\"tlist\", []):\n", "        print(item)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import re\n", "\n", "file_list = [\"OUROCARD_VISA_INFINITE-Ago_24.txt\", \"OUROCARD_VISA_INFINITE-Jul_24.txt\", \"OUROCARD_VISA_INFINITE-Próxima_Fatura.txt\", \"OUROCARD_VISA_INFINITE-Próxima_Fatura(1).txt\"]\n", "partial_invoice_pattern = r\"L A N Ç A M E N T O S    F U T U R O S\"\n", "\n", "for file_name in file_list:\n", "    # Reset the flag for every file; otherwise one partial file marks all later files as partial\n", "    partial = False\n", "    with open(file_name, \"r\", encoding=\"latin\") as file:\n", "        contents = file.readlines()\n", "        for line in contents:\n", "            if re.findall(partial_invoice_pattern, line):\n", "                partial = True\n", "\n", "        if partial:\n", "            print(f\"file {file.name} is partial\")\n", "        else:\n", "            print(f\"file {file.name} is full\")\n", "\n" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import datetime\n", "datetime.datetime.now().year" ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "CONNECTED! 
\n", "full result => [(1, 'Daniel', datetime.datetime(2024, 9, 10, 15, 58, 47), None), (2, 'Izabely', datetime.datetime(2024, 9, 10, 15, 59, 51), None)]\n", "Daniel\n", "DONE!\n" ] } ], "source": [ "from mysql.connector import connect, Error\n", "\n", "query = \"SELECT * FROM OWNER\"\n", "\n", "try:\n", "    with connect(\n", "        host=\"localhost\",\n", "        user=\"root\",\n", "        password=\"pleasehashapasswordomg\",\n", "        database=\"default\",\n", "    ) as connection:\n", "        print(\"CONNECTED!\", connection)\n", "        with connection.cursor() as cursor:\n", "            cursor.execute(query)\n", "            result = cursor.fetchall()\n", "            if result:\n", "                print(\"full result =>\", result)\n", "                print(result[0][1])\n", "            else:\n", "                print(\"No data found\")\n", "        print(\"DONE!\")\n", "# The 'with' block already closes the connection; a 'finally: connection.close()' here\n", "# would raise NameError when connect() itself fails, masking the real error.\n", "except Error as e:\n", "    print(e)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "class DataHolder:\n", "    def __init__(self, name, age, email):\n", "        \"\"\"Initialize the DataHolder with name, age, and email.\"\"\"\n", "        self.name = name\n", "        self.age = age\n", "        self.email = email\n", "\n", "    def display_info(self):\n", "        \"\"\"Display the information of the DataHolder.\"\"\"\n", "        print(f\"Name: {self.name}\")\n", "        print(f\"Age: {self.age}\")\n", "        print(f\"Email: {self.email}\")\n", "\n", "    def update_age(self, new_age):\n", "        \"\"\"Update the age of the DataHolder.\"\"\"\n", "        self.age = new_age\n", "        print(f\"Age updated to: {self.age}\")" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "from decimal import Decimal\n", "\n", "string = \"12.99\"\n", "test = Decimal(string)\n", "\n", "print(test)" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "class Transaction:\n", "    def __init__(self) -> None:\n", "        pass" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "\"docker exec -i f5a5a73ad60e 
mysqldump -uroot -ppleasehashapasswordomg --databases default --skip-comments > /home/f2256342/forge/robopato/dump.sql\"" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "['output',\n", " 'test_daniel_next.txt',\n", " 'cc.xml',\n", " 'extrato.txt',\n", " 'OUROCARD_VISA_INFINITE-Set_24.txt',\n", " 'extrato_ago_24.csv',\n", " 'cc.pdf',\n", " 'OUROCARD_VISA_INFINITE-Jun_24.txt',\n", " 'staging',\n", " 'OUROCARD_VISA_INFINITE-Ago_24.txt',\n", " 'extrato_30d.ofx',\n", " 'input',\n", " 'extrato_30d.csv',\n", " 'OUROCARD_VISA_INFINITE-Próxima_Fatura.txt',\n", " 'movimentacoes(1).csv',\n", " 'table-test.txt',\n", " 'movimentacoes.csv',\n", " 'OUROCARD_VISA_INFINITE-Jul_24.txt',\n", " 'extrato_ago_24.ofx',\n", " 'test_iza_next.txt',\n", " 'movimentacoes(2).csv']" ] }, "execution_count": 7, "metadata": {}, "output_type": "execute_result" } ], "source": [ "import os\n", "\n", "os.listdir(\"./documents\")" ] }, { "cell_type": "code", "execution_count": 11, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "['test_daniel_next.txt', 'cc.xml', 'extrato.txt', 'OUROCARD_VISA_INFINITE-Set_24.txt', 'extrato_ago_24.csv', 'cc.pdf', 'OUROCARD_VISA_INFINITE-Jun_24.txt', 'OUROCARD_VISA_INFINITE-Ago_24.txt', 'extrato_30d.ofx', 'extrato_30d.csv', 'OUROCARD_VISA_INFINITE-Próxima_Fatura.txt', 'movimentacoes(1).csv', 'table-test.txt', 'movimentacoes.csv', 'OUROCARD_VISA_INFINITE-Jul_24.txt', 'extrato_ago_24.ofx', 'test_iza_next.txt', 'movimentacoes(2).csv']\n" ] } ], "source": [ "import os\n", "\n", "with os.scandir(\"./documents\") as entries:\n", " files_only = [entry.name for entry in entries if entry.is_file()]\n", "\n", "print(files_only)" ] }, { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "['./documents/OUROCARD_VISA_INFINITE-Set_24.txt', './documents/OUROCARD_VISA_INFINITE-Jun_24.txt', 
'./documents/OUROCARD_VISA_INFINITE-Ago_24.txt', './documents/OUROCARD_VISA_INFINITE-Próxima_Fatura.txt', './documents/OUROCARD_VISA_INFINITE-Jul_24.txt']\n" ] } ], "source": [ "import glob\n", "\n", "files_only = glob.glob(\"./documents/OUROCARD*.txt\")\n", "\n", "print(files_only)" ] }, { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "['test_daniel_next.txt', 'cc.xml', 'extrato.txt', 'OUROCARD_VISA_INFINITE-Set_24.txt', 'extrato_ago_24.csv', 'cc.pdf', 'OUROCARD_VISA_INFINITE-Jun_24.txt', 'OUROCARD_VISA_INFINITE-Ago_24.txt', 'extrato_30d.ofx', 'extrato_30d.csv', 'OUROCARD_VISA_INFINITE-Próxima_Fatura.txt', 'movimentacoes(1).csv', 'table-test.txt', 'movimentacoes.csv', 'OUROCARD_VISA_INFINITE-Jul_24.txt', 'extrato_ago_24.ofx', 'test_iza_next.txt', 'movimentacoes(2).csv']\n" ] } ], "source": [ "from pathlib import Path\n", "\n", "dir_path = Path(\"./documents\")\n", "files_only = [file.name for file in dir_path.iterdir() if file.is_file()]\n", "\n", "print(files_only) # This will print only the files in the specified path" ] }, { "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "['./documents/extrato_ago_24.csv', './documents/extrato_30d.csv', './documents/extrato-set-2024.csv', './documents/extrato-jun-2024.csv', './documents/extrato-jul-2024.csv']\n" ] } ], "source": [ "import glob\n", "\n", "files_only = glob.glob(\"./documents/extrato*.csv\")\n", "\n", "print(files_only)" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "CONNECTED! 
\n", "DONE!\n" ] } ], "source": [ "import csv\n", "from datetime import datetime\n", "\n", "# Read the CSV file and extract the \"CATEGORY\" column\n", "categories = set()\n", "with open('PAYEE_FILTER.csv', newline='', encoding='utf-8') as csvfile:\n", "    reader = csv.DictReader(csvfile)\n", "    for row in reader:\n", "        category = row['CATEGORY']\n", "        if category:  # Only add non-empty categories\n", "            categories.add(category)\n", "\n", "# Create a list of tuples with the unique categories and current datetime as a string\n", "category_tuples = [(category, str(datetime.now(tz=None))) for category in categories]\n", "\n", "insert_query = \"INSERT IGNORE INTO default.CATEGORY (NAME, CREATED) VALUES (%s, %s)\"\n", "\n", "\n", "from mysql.connector import connect, Error\n", "\n", "try:\n", "    with connect(\n", "        host=\"127.0.0.1\",\n", "        user=\"root\",\n", "        password=\"pleasehashapasswordomg\",\n", "        database=\"default\",\n", "    ) as connection:\n", "        print(\"CONNECTED!\", connection)\n", "        with connection.cursor() as cursor:\n", "            cursor.executemany(insert_query, category_tuples)\n", "            connection.commit()\n", "        print(\"DONE!\")\n", "# The 'with' block already closes the connection; a 'finally: connection.close()' here\n", "# would raise NameError when connect() itself fails, masking the real error.\n", "except Error as e:\n", "    print(e)" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "[('Compras', '2024-11-08 12:28:12.251054'), ('Operacional', '2024-11-08 12:28:12.251054'), ('Renda', '2024-11-08 12:28:12.251054'), ('Transporte', '2024-11-08 12:28:12.251054'), ('Cora', '2024-11-08 12:28:12.251054'), ('Impostos', '2024-11-08 12:28:12.251054'), ('Alimentação', '2024-11-08 12:28:12.251054')]\n" ] } ], "source": [ "import csv\n", "from datetime import datetime\n", "\n", "# Read the CSV file and extract the \"CATEGORY\" column\n", "categories = set()\n", "with open('PAYEE_FILTER.csv', newline='', encoding='utf-8') as csvfile:\n", "    reader = csv.DictReader(csvfile)\n", "    for row in reader:\n", "        category = row['CATEGORY']\n", 
" if category: # Only add non-empty categories\n", " categories.add(category)\n", "\n", "# Create a list of tuples with the unique categories and current datetime as a string\n", "category_tuples = [(category, str(datetime.now(tz=None))) for category in categories]\n", "\n", "print(category_tuples)" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "[('Compras Digitais', '2024-11-08 15:27:36.418568'), ('Salários', '2024-11-08 15:27:36.418568'), ('Educação', '2024-11-08 15:27:36.418568'), ('Aplicativos', '2024-11-08 15:27:36.418568'), ('Restaurantes', '2024-11-08 15:27:36.418568'), ('Celular', '2024-11-08 15:27:36.418568'), ('PLR', '2024-11-08 15:27:36.418568'), ('Internet', '2024-11-08 15:27:36.418568'), ('Compras Físicas', '2024-11-08 15:27:36.418568'), ('Cashbacks', '2024-11-08 15:27:36.418568'), ('IOF', '2024-11-08 15:27:36.418568')]\n" ] } ], "source": [ "import csv\n", "from datetime import datetime\n", "\n", "# Read the CSV file and extract the \"SUBCATEGORY\" column\n", "subcategories = set()\n", "with open('PAYEE_FILTER.csv', newline='', encoding='utf-8') as csvfile:\n", " reader = csv.DictReader(csvfile)\n", " for row in reader:\n", " subcategory = row['SUBCATEGORY']\n", " if subcategory: # Only add non-empty categories\n", " subcategories.add(subcategory)\n", "\n", "# Create a list of tuples with the unique categories and current datetime as a string\n", "subcategory_tuples = [(subcategory, str(datetime.now(tz=None))) for subcategory in subcategories]\n", "\n", "print(subcategory_tuples)" ] } ], "metadata": { "kernelspec": { "display_name": "3.11.4", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.11.4" } }, "nbformat": 4, "nbformat_minor": 2 }