I'm building a graphing program for my work. I've gotten this far with the help of ChatGPT, but I can't figure out how to implement the two features I want. I really need your help.
Behavior description: The user enters a start and end date, and the program plots a graph for each day in that range. Days are categorized as holiday, day before holiday, day after holiday, or normal. Each day is divided into four six-hour quarters, so there are four pages per day.
Desired features:
I'd like the graphs to be continuous across pages, especially the lower curves (ch in range(1, 5)): if page 1 ends at 180 degrees, page 2 should start at 180 degrees, but right now every page starts over from the beginning of the pattern (see the first sketch below).
I want more randomness in the lower curves on normal days. There is some randomness now, but within a single page the pattern repeats almost identically. I'd like variation within the page, for example the red channel on top at the first peak around 180 degrees and the green channel on top at the next peak (see the second sketch below).
Please help me.
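For the first feature, I think something along these lines might work, but it is only a rough sketch I have not verified: reuse the previous_end_values dict that is already defined in my code, and after generating each page, shift every channel so its first sample matches the previous page's last sample. The helper name apply_continuity is made up, not something from my code.

import numpy as np  # same alias as in the program below

def apply_continuity(data, previous_end_values):
    # Shift each channel so this page starts where the previous page ended.
    for ch in range(1, 13):
        key = f"CH{ch:02}"
        values = np.asarray(data[key], dtype=float)
        prev_end = previous_end_values.get(key)
        if prev_end is not None:
            values = values + (prev_end - values[0])  # offset the whole page
        previous_end_values[key] = values[-1]  # remember where this page ends
        data[key] = values
    return data

I imagine it would be called right after generate_graph_data inside save_graphs_as_pdf, resetting previous_end_values to None for every channel at the start of each new day if pages should only connect within the same day.

For the second feature, my rough idea (again a sketch, not verified code) is to stop np.tile-ing one identical 14-sample cycle for CH01 to CH04 and instead build the pattern cycle by cycle, so every peak gets its own random amplitude and offset; with separate draws per channel, a different channel can end up on top at each peak. The names varied_cycles, cycle_len, and rng are illustrative and assume numpy as np, as above.

def varied_cycles(base_temp, n_points, cycle_len=14, rng=None):
    # Repeating ramp pattern in which every cycle has its own random amplitude and offset.
    if rng is None:
        rng = np.random.default_rng()
    cycles = []
    total = 0
    while total < n_points:
        amplitude = rng.uniform(3, 8)   # peak height changes every cycle
        offset = rng.uniform(-4, 4)     # vertical shift changes every cycle
        cycles.append(np.linspace(base_temp - amplitude, base_temp + amplitude, cycle_len) + offset)
        total += cycle_len
    return np.concatenate(cycles)[:n_points]

Replacing the np.tile(...) pattern for ch in range(1, 5) with something like varied_cycles(base_temp, len(time_intervals)) should make each peak within one page look different; which channel sits on top at a given peak then depends on each channel's own random draws.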
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from matplotlib.backends.backend_pdf import PdfPages
from datetime import datetime, timedelta
import numpy as np
import random
import os
from workalendar.asia import SouthKorea
import platform
if platform.system() == "Windows":
    plt.rcParams["font.family"] = "Malgun Gothic"
elif platform.system() == "Darwin":
    plt.rcParams["font.family"] = "AppleGothic"
elif platform.system() == "Linux":
    plt.rcParams["font.family"] = "NanumGothic"
DESKTOP_PATH = os.path.join(os.path.expanduser("~"), "Desktop")
cal = SouthKorea()
def is_holiday(date):
    return cal.is_holiday(date) or date.weekday() == 6 or date.strftime("%m-%d") == "05-01"

def classify_date(target_date):
    prev_day = target_date - timedelta(days=1)
    next_day = target_date + timedelta(days=1)
    if is_holiday(target_date):
        return "Holiday"
    elif is_holiday(prev_day) and not is_holiday(target_date):
        return "Day After Holiday"
    elif is_holiday(next_day) and not is_holiday(target_date):
        return "Day Before Holiday"
    else:
        return "Normal"

def get_date_info():
    start_date = datetime.strptime(input("Enter start date (YYYY-MM-DD): "), "%Y-%m-%d")
    end_date = datetime.strptime(input("Enter end date (YYYY-MM-DD): "), "%Y-%m-%d")
    date_list = []
    current_date = start_date
    while current_date <= end_date:
        date_list.append((current_date.strftime("%Y-%m-%d"), classify_date(current_date)))
        current_date += timedelta(days=1)
    return date_list, start_date, end_date
previous_end_values = {f"CH{ch:02}": None for ch in range(1, 13)}
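# NOTE: previous_end_values is declared global inside generate_graph_data but is never read or updated, so pages currently never connect.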
def generate_graph_data(date_type, part):
    global previous_end_values
    if date_type in ["Normal", "Holiday"] or (date_type == "Day After Holiday" and part == 18):
        np.random.seed(42 + part)
        random.seed(42 + part)
    else:
        np.random.seed(42)
        random.seed(42)
    time_intervals = [datetime(2024, 1, 1, part, 0) + timedelta(minutes=15 * i) for i in range(360)]
    data = {"time": time_intervals}
    if date_type == "Day Before Holiday":
        if part == 0:
            for ch in range(1, 5):
                base_temp = 160 + (ch * 5)
                normal_part = np.tile(np.linspace(base_temp - 5, base_temp + 5, 14), 20)
                rise_part = np.linspace(base_temp, base_temp + 20, 14)
                plateau_part = np.full(14, base_temp + 20)
                decline_part = np.linspace(base_temp + 20, base_temp - 10, len(time_intervals) - len(normal_part) - len(rise_part) - len(plateau_part))
                values = np.concatenate([normal_part, rise_part, plateau_part, decline_part])
                values += np.random.uniform(-1, 1, len(values))
                data[f"CH{ch:02}"] = values[:len(time_intervals)]
            for ch in range(5, 13):
                base_temp = 200
                values = np.full(len(time_intervals), base_temp, dtype=float)
                values[len(normal_part) + len(rise_part) + len(plateau_part):] = decline_part
                values += np.random.uniform(-1, 1, len(time_intervals))
                data[f"CH{ch:02}"] = values[:len(time_intervals)]
        elif part == 6:
            for ch in range(1, 13):
                start_temp = 150
                end_temp = 60
                x = np.linspace(0, 1, len(time_intervals))
                decline_curve = start_temp - (start_temp - end_temp) * np.sqrt(x)
                decline_curve += np.random.uniform(-1, 1, len(time_intervals))
                data[f"CH{ch:02}"] = decline_curve[:len(time_intervals)]
        elif part == 12:
            start_temp = 60
            end_temp = 30
            decline_curve = np.linspace(start_temp, end_temp, len(time_intervals))
            for ch in range(1, 13):
                noise = np.random.uniform(-0.5, 0.5, len(time_intervals))
                data[f"CH{ch:02}"] = decline_curve + noise
        elif part == 18:
            for ch in range(1, 13):
                start_temp = 30
                mid_temp = 15
                end_temp = 15
                x = np.linspace(0, 1, len(time_intervals))
                transition = start_temp - (start_temp - mid_temp) * x
                noise = np.zeros(len(time_intervals))
                noise[int(len(time_intervals) * 0.6):] = np.random.uniform(-1, 1, len(time_intervals) - int(len(time_intervals) * 0.6))
                final_curve = transition + noise
                data[f"CH{ch:02}"] = final_curve
    elif date_type == "Holiday":
        for ch in range(1, 13):
            noise = np.random.uniform(-1, 1, len(time_intervals))
            data[f"CH{ch:02}"] = np.full(len(time_intervals), 15) + noise
    elif date_type == "Day After Holiday" and part == 0:
        for ch in range(1, 13):
            noise = np.random.uniform(-1, 1, len(time_intervals))
            data[f"CH{ch:02}"] = np.full(len(time_intervals), 15) + noise
    elif date_type == "Day After Holiday" and part == 6:
        for ch in range(1, 13):
            base_temp = 15
            final_temp = 200
            warmup_start = int(len(time_intervals) * 0.55)
            warmup_end = int(len(time_intervals) * 0.67)
            temp_curve = np.full(warmup_start, base_temp) + np.random.uniform(-1, 1, warmup_start)
            x = np.linspace(0, 1, warmup_end - warmup_start)
            rise_curve = base_temp + (final_temp - base_temp) * (3 * x ** 2 - 2 * x ** 3)
            rise_curve += np.random.uniform(-2, 2, warmup_end - warmup_start)
            plateau_curve = np.full(len(time_intervals) - warmup_end, final_temp) + np.random.uniform(-2, 2, len(time_intervals) - warmup_end)
            final_curve = np.concatenate([temp_curve, rise_curve, plateau_curve])
            data[f"CH{ch:02}"] = final_curve
    elif date_type == "Day After Holiday" and part == 12:
        base_temp = 200
        time_split = int(len(time_intervals) * 0.45)
        for ch in range(1, 13):
            stable_part = np.full(time_split, base_temp) + np.random.uniform(-2, 2, time_split)
            if ch in range(1, 5):
                cycle_count = (len(time_intervals) - time_split) // 14
                normal_variation = np.tile(np.linspace(150, 180, 14), cycle_count)[:len(time_intervals) - time_split]
                normal_variation += np.random.uniform(-3, 3, len(normal_variation))
                final_curve = np.concatenate([stable_part, normal_variation])
            else:
                stable_part_2 = np.full(len(time_intervals) - time_split, base_temp) + np.random.uniform(-2, 2, len(time_intervals) - time_split)
                final_curve = np.concatenate([stable_part, stable_part_2])
            final_curve = np.pad(final_curve, (0, max(0, 360 - len(final_curve))), 'edge')
            data[f"CH{ch:02}"] = final_curve
    else:
        for ch in range(1, 5):
            base_temp = 160 + (ch * 5)
            recovery_curve = np.concatenate([np.full(1, base_temp - 25), np.linspace(base_temp - 25, base_temp, 14) + np.random.uniform(-2, 2, 14)])
            values = np.tile(recovery_curve, 24)[:len(time_intervals)]
            data[f"CH{ch:02}"] = values
        for ch in range(5, 13):
            values = np.random.normal(200, 1.2, len(time_intervals))
            data[f"CH{ch:02}"] = values
    for ch in range(1, 13):
        key = f"CH{ch:02}"
        if key not in data:
            data[f"CH{ch:02}"] = np.full(len(time_intervals), 15)
    return data
def plot_graph(data, date_str, part, date_type):
    fig, ax = plt.subplots(figsize=(10, 6))
    title_text = f"Record Date: {date_str} {part:02}:00 ~ {(part + 6) % 24:02}:00"
    plt.text(0.02, 1.25, title_text, transform=ax.transAxes, fontsize=12, fontweight='bold', ha='left')
    for ch in range(1, 13):
        ax.plot(data["time"], data[f"CH{ch:02}"], label=f"CH{ch:02} [.C]", linewidth=0.8)
    ax.set_xlabel("")
    ax.set_ylabel("Temperature (℃)")
    ax.grid(axis="y")
    ax.set_yticks(np.arange(0, 301, 30))
    ax.set_xticks([])
    ax.legend(loc="upper left", bbox_to_anchor=(0, 1.14), ncol=6, fontsize=10, frameon=False)
    ax.margins(x=0)
    ax.set_xlim(min(data["time"]), max(data["time"]))
    x_min, x_max = ax.get_xlim()
    x_positions = np.linspace(x_min, x_max, 13)
    for x in x_positions:
        ax.axvline(x=x, color='gray', linestyle='--', linewidth=0.5)
    time_labels = [f"{(part + i // 2) % 24:02}:{'00' if i % 2 == 0 else '30'}" for i in range(13)]
    for x, label in zip(x_positions, time_labels):
        ax.text(x, -20, label, fontsize=9, ha='center', transform=ax.transData)
    return plt
def save_graphs_as_pdf(date_list, start_date, end_date):
    print("Creating graphs...")
    pdf_filename = os.path.join(DESKTOP_PATH, f"{start_date.strftime('%Y-%m-%d')}~{end_date.strftime('%Y-%m-%d')}.pdf")
    with PdfPages(pdf_filename) as pdf:
        for date_str, date_type in date_list:
            for part in range(0, 24, 6):
                data = generate_graph_data(date_type, part)
                plt_obj = plot_graph(data, date_str, part, date_type)
                pdf.savefig(plt_obj.gcf(), bbox_inches='tight', dpi=300)
                plt_obj.close()
    print("Graph creation complete!")
    print(f"PDF saved: {pdf_filename}")
date_list, start_date, end_date = get_date_info()
save_graphs_as_pdf(date_list, start_date, end_date)
I've asked other AIs (Google, ChatGPT, DeepSeek, Claude, and others), but none of them have been able to solve these two problems.