2024 D2P2 (I hate this)

This commit is contained in:
2025-11-03 22:11:31 +00:00
parent 37cb5f5735
commit d2efdef91f
3 changed files with 75 additions and 37 deletions

BIN
2024/aoc

Binary file not shown.

View File

@@ -96,16 +96,24 @@ public:
// Access a chunk of tokens for a given line // Access a chunk of tokens for a given line
// E.g. Iterate [1,2], [2,3], [3,4] // E.g. Iterate [1,2], [2,3], [3,4]
std::vector<std::vector<FileFragment>> ChunkView(int line, size_t n, size_t gap) { std::vector<std::vector<FileFragment>> ChunkView(int line, size_t size, size_t stride)
std::vector<FileFragment> v = _tokens[line]; {
const auto& v = _tokens[line];
std::vector<std::vector<FileFragment>> chunks; std::vector<std::vector<FileFragment>> chunks;
for (size_t i = 0; i < v.size() - 1; i += gap) {
chunks.emplace_back(v.begin() + i, if (v.empty() || stride == 0)
v.begin() + std::min(v.size(), i + n)); return chunks;
for (size_t i = 0; i + size <= v.size(); i += stride)
{
std::cout << "max " << i + size << " tokens " << v.size() << " index " << i << " size " << size << " stride " << stride << std::endl;
chunks.emplace_back(v.begin() + i, v.begin() + i + size);
} }
return chunks; return chunks;
} }
/// Iterate through all lines and their tokens /// Iterate through all lines and their tokens
auto begin() const { return _tokens.begin(); } auto begin() const { return _tokens.begin(); }
auto end() const { return _tokens.end(); } auto end() const { return _tokens.end(); }

View File

@@ -7,50 +7,80 @@ public:
~Day02() {} ~Day02() {}
int Day() override {return 2;} int Day() override {return 2;}
bool isSafe(const std::vector<int>& levels)
{
    // A report is "safe" when every adjacent pair moves in the same
    // direction (all increasing or all decreasing) and each step
    // changes by at least 1 and at most 3.
    // Reports with fewer than two levels are trivially safe.
    if (levels.size() < 2)
        return true;
    // Direction is fixed by the first pair; an equal first pair is
    // rejected below by the step == 0 check on the first iteration.
    const bool increasing = levels[1] > levels[0];
    for (size_t i = 0; i + 1 < levels.size(); ++i)
    {
        const int step = levels[i + 1] - levels[i];
        // Flat steps and jumps larger than 3 are unsafe.
        if (step == 0 || std::abs(step) > 3)
            return false;
        // Any change of direction is unsafe.
        if ((step > 0) != increasing)
            return false;
    }
    return true;
}
bool isSafeWithDampener(const std::vector<int>& levels)
{
    // The "Problem Dampener" tolerates one bad level: a report passes
    // if it is safe as-is, or if removing any single level makes it safe.
    if (isSafe(levels))
        return true;
    // Brute force: rebuild the report once per candidate index, each
    // time leaving that one level out, and retest.
    for (size_t skip = 0; skip < levels.size(); ++skip)
    {
        std::vector<int> trimmed;
        trimmed.reserve(levels.size() - 1);
        for (size_t j = 0; j < levels.size(); ++j)
        {
            if (j != skip)
                trimmed.push_back(levels[j]);
        }
        if (isSafe(trimmed))
            return true;
    }
    return false;
}
int PartOne(File& f) override int PartOne(File& f) override
{ {
f.SplitBy(" "); f.SplitBy(" ");
int result_bad = 0; int safe = 0;
for (int i = 0; i < f.Lines().size(); i++) for (int i = 0; i < f.Lines().size(); ++i)
{ {
bool lastDirection; std::vector<int> nums;
bool first = true; for (auto& token : f.TokensForLine(i))
for (const auto& tokenPair : f.ChunkView(i, 2, 1)) nums.push_back(std::atoi(token.Data.c_str()));
{
int token = std::atoi(tokenPair[0].Data.c_str());
int nextToken = std::atoi(tokenPair[1].Data.c_str());
int diff = token - nextToken; if (isSafe(nums))
bool direction = diff > 0; safe++;
if (first)
{
lastDirection = direction;
first = false;
}
if (std::abs(diff) > 3
|| token == nextToken
|| direction != lastDirection)
{
result_bad++;
break;
}
lastDirection = direction;
first = false;
}
} }
int result = f.Lines().size() - result_bad; return safe;
return result;
} }
int PartTwo(File&) override int PartTwo(File& f) override
{ {
int safe = 0;
for (int i = 0; i < f.Lines().size(); ++i)
{
std::vector<int> nums;
for (auto& token : f.TokensForLine(i))
nums.push_back(std::atoi(token.Data.c_str()));
if (isSafeWithDampener(nums))
safe++;
}
return safe;
} }
}; };