Example 1

#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
"""
from pypchutils.generic import create_logger
from pyspark.sql import SparkSession, DataFrame
from pyspark.sql import types as T, functions as F

from pyspark_demo.commons.rate_processor import RateProcessor

logger = create_logger(__name__, level="info")


def gen_test_data(spark: SparkSession, verbose: int = 1) -> DataFrame:
    """
    """
    # Create a Spark data frame
    schema = T.StructType([
        T.StructField("date", T.StringType(), True),
        T.StructField("user_id", T.IntegerType(), True),
        T.StructField("user_name", T.StringType(), True),
        T.StructField("total_orders", T.IntegerType(), True),
        T.StructField("total_amount", T.FloatType(), True),
    ])
    data = [
        ("2020-01-01", 1, "AA", 111, 111.11),
        ("2020-01-01", 2, "BB", 222, 222.22),
        ("2020-04-04", 1, "AA", 444, 444.44),
        ("2020-04-01", 3, "CC", 333, 333.33),
    ]
    data = spark.createDataFrame(data, schema=schema)
    if verbose:
        data.show()
    return data
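
# A minimal driver sketch (not in the original file): the appName/master
# values are assumptions, and RateProcessor is omitted because its
# interface does not appear in this snippet.
if __name__ == "__main__":
    spark = SparkSession.builder.appName("pyspark-demo").master("local[*]").getOrCreate()
    df = gen_test_data(spark, verbose=1)
    logger.info("Generated %s rows", df.count())
    spark.stop()
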
Example 2

#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Tutorial: https://towardsdatascience.com/an-easy-introduction-to-pytorch-for-neural-networks-3ea08516bff2

# Usage
export PYTHONPATH=$(pwd)
"""
import json
import os
import pandas as pd
from pypchutils.generic import create_logger
import torch
import torch.nn as nn
from torch.nn import functional as F
import torchvision
import torchvision.transforms as transforms

logger = create_logger(__name__)
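
# The torchvision imports above suggest an MNIST-style data pipeline; this is
# a hedged sketch of what it might look like (an assumption, since the original
# file is truncated before any data-loading code appears). The names here
# (get_train_loader, batch_size, "./data") are illustrative, not from the source.
from torch.utils.data import DataLoader


def get_train_loader(batch_size: int = 64) -> DataLoader:
    transform = transforms.Compose([
        transforms.ToTensor(),  # PIL image -> float tensor scaled to [0, 1]
    ])
    train_set = torchvision.datasets.MNIST(
        root="./data", train=True, download=True, transform=transform
    )
    return DataLoader(train_set, batch_size=batch_size, shuffle=True)
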


class Net(nn.Module):
    def __init__(self):
        super(Net, self).__init__()
        self.conv1 = nn.Conv2d(1, 64, kernel_size=(3, 3), padding=1)
        self.conv2 = nn.Conv2d(64, 64, kernel_size=(3, 3), padding=1)
        self.max_pool = nn.MaxPool2d(2, 2)
        self.global_pool = nn.AvgPool2d(7)
        self.fc1 = nn.Linear(64, 64)
        self.fc2 = nn.Linear(64, 10)

    def forward(self, x):
        """Run the input through the stacked layers."""
        # The pass follows from the layers defined in __init__: two 3x3 convs
        # with 2x2 max pooling take a 28x28 input down to 7x7, which the 7x7
        # global average pool reduces to a 64-dim feature vector.
        x = F.relu(self.conv1(x))
        x = self.max_pool(x)
        x = F.relu(self.conv2(x))
        x = self.max_pool(x)
        x = self.global_pool(x)
        x = x.view(-1, 64)  # flatten to (batch, 64) for the linear layers
        x = F.relu(self.fc1(x))
        return self.fc2(x)  # logits for 10 classes
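
# Quick shape check (a sketch, not in the original file): assumes MNIST-sized
# 28x28 grayscale input, which is what the layer sizes above imply.
if __name__ == "__main__":
    net = Net()
    dummy = torch.randn(4, 1, 28, 28)  # batch of 4 single-channel images
    logits = net(dummy)
    logger.info("Output shape: %s", tuple(logits.shape))  # expected: (4, 10)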