perf: 产品管理: 最小人等: 0=1
parent
8bc32b199b
commit
cdb384e59d
@ -1,139 +0,0 @@
|
||||
import { flush, groupBy, isEmpty, isNotEmpty, unique, uniqWith } from '@/utils/commons';
|
||||
import dayjs from 'dayjs';
|
||||
// Season codes used below: SS = shoulder season (平季), PS = peak season (旺季)
|
||||
/**
 * Decide whether a quote's validity window counts as "full year or longer".
 *
 * True when either:
 *  - the window is exactly Jan 1st .. Dec 31st of `year` (day precision), or
 *  - the window spans one calendar year or more (dayjs `diff(..., 'year') >= 1`).
 *
 * @param {number|string} year      - calendar year to test against (e.g. 2024)
 * @param {string} startDate        - range start, 'YYYY-MM-DD'
 * @param {string} endDate          - range end, 'YYYY-MM-DD'
 * @returns {boolean}
 */
const isFullYearOrLonger = (year, startDate, endDate) => {
  const rangeStart = dayjs(startDate, 'YYYY-MM-DD');
  const rangeEnd = dayjs(endDate, 'YYYY-MM-DD');

  // Exact match against the first and last day of `year`.
  const coversWholeYear =
    rangeStart.isSame(dayjs(`${year}-01-01`, 'YYYY-MM-DD'), 'day') &&
    rangeEnd.isSame(dayjs(`${year}-12-31`, 'YYYY-MM-DD'), 'day');
  if (coversWholeYear) return true;

  // Otherwise accept any span of at least one full year.
  return rangeEnd.diff(rangeStart, 'year') >= 1;
};
|
||||
|
||||
/**
 * Keep only the sub-arrays that are NOT a subset of some other sub-array.
 *
 * A sub-array is dropped when every one of its values appears in another
 * entry of the list (comparison by reference identity skips self-checks, so
 * two distinct arrays with the same values eliminate each other — callers
 * are expected to have deduplicated beforehand).
 *
 * @param {Array<Array<*>>} arr - list of value lists (primitives)
 * @returns {Array<Array<*>>} the maximal (non-covered) sub-arrays, in order
 */
const uniqueBySub = (arr) =>
  arr.filter((candidate) => {
    const coveredByOther = arr.some((other) => {
      // Reference equality: never compare a sub-array with itself.
      if (other === candidate) return false;
      const otherValues = new Set(other);
      // candidate ⊆ other ?
      return candidate.every((value) => otherValues.has(value));
    });
    return !coveredByOther;
  });
|
||||
/**
 * Chunk product quotation rows by season and group-size (pax) tiers.
 *
 * Pipeline:
 *  1. Tag every quote item 'SS' (full-year-or-longer window) or 'PS'.
 *  2. Build per-product sorted lists of `group_size_min`, collapse them into a
 *     minimal set of tier cut lists, and expand those into [min, max] ranges.
 *  3. Re-walk the rows, stamp each quote item with its matching range
 *     (`quote_size`), and group items by the `by` keys into `quote_chunk`.
 *
 * @param {number|string} year - NOTE(review): parameter is named `use_year`;
 *        passed through to isFullYearOrLonger for the season tag.
 * @param {Array<object>} dataList - product rows; each has `.info.id`,
 *        and `.quotation[]` items with `WPI_SN`, `group_size_min`,
 *        `group_size_max`, `use_dates_start`, `use_dates_end`.
 * @param {string[]} by - quote-item property names used to build the
 *        '@'-joined grouping key for `quote_chunk`.
 * @returns {object} { chunk, dataSource, SSRange, PSRange, SSsizeSetKey, sizeSets }
 */
export const chunkBy = (use_year, dataList = [], by = []) => {
  // Tag each quote item with its season code ('SS' or 'PS'); rows are copied shallowly.
  const dataRollSS = dataList.map((rowp, ii) => {
    const quotation = rowp.quotation.map((quoteItem) => {
      return {
        ...quoteItem,
        quote_season: isFullYearOrLonger(use_year, quoteItem.use_dates_start, quoteItem.use_dates_end) ? 'SS' : 'PS',
      };
    });
    return { ...rowp, quotation };
  });

  // Pax-tier grouping uses shoulder-season quotes only, because each product occupies a single row.
  const allQuotesSS = dataRollSS.reduce((acc, rowp) => acc.concat(rowp.quotation.filter((q) => q.quote_season === 'SS')), []);

  const allQuotesPS = dataRollSS.reduce((acc, rowp) => acc.concat(rowp.quotation.filter((q) => q.quote_season === 'PS')), []);
  // Fall back to peak-season quotes when there are no shoulder-season ones.
  const allQuotesSSS = isEmpty(allQuotesSS) ? allQuotesPS : allQuotesSS;

  // Per product (keyed by WPI_SN): the sorted, de-duplicated list of group_size_min values.
  const PGroupSizeSS = allQuotesSSS.reduce((aq, cq) => {
    aq[cq.WPI_SN] = aq[cq.WPI_SN] || [];
    aq[cq.WPI_SN].push(cq.group_size_min);
    aq[cq.WPI_SN] = unique(aq[cq.WPI_SN]);
    aq[cq.WPI_SN] = aq[cq.WPI_SN].slice().sort((a, b) => a - b);
    return aq;
  }, {});

  // Largest group_size_max across all considered quotes; 1000 appears to be a
  // sentinel for "unbounded" and is mapped to Infinity — TODO confirm.
  const maxGroupSize = Math.max(...allQuotesSSS.map((q) => q.group_size_max));
  const maxSet = maxGroupSize === 1000 ? Infinity : maxGroupSize;

  // Collapse identical min-lists, then drop lists fully contained in another.
  const _SSMinSet = uniqWith(Object.values(PGroupSizeSS), (a, b) => a.join(',') === b.join(','));
  // const uSSsizeSetArr = (_SSMinSet)
  const uSSsizeSetArr = uniqueBySub(_SSMinSet);

  // * If tiers must NOT overlap across products, drop the uniqueBySub call above.
  // Re-point every product at the covering (superset) min-list; `find` may
  // return undefined when no covering list exists — handled by `|| []` below.
  for (const key in PGroupSizeSS) {
    if (Object.prototype.hasOwnProperty.call(PGroupSizeSS, key)) {
      const element = PGroupSizeSS[key];
      const findSet = uSSsizeSetArr.find((minCut) => element.every((v) => minCut.includes(v)));
      PGroupSizeSS[key] = findSet;
    }
  }

  // Expand each min-list [m1, m2, ...] into ranges [[m1, m2-1], ..., [mk, maxSet]].
  // PSsizeSets is always [] here (second input is empty) — kept as-is.
  const [SSsizeSets, PSsizeSets] = [uSSsizeSetArr, []].map((arr) => {
    const _arr = structuredClone(arr);
    const arrSets = _arr.map((keyMins) =>
      keyMins.reduce((acc, curr, idx, minsArr) => {
        const _max = idx === minsArr.length - 1 ? maxSet : Number(minsArr[idx + 1]) - 1;
        acc.push([Number(curr), _max]);
        return acc;
      }, [])
    );
    return arrSets;
  });

  // Compact summary merged into the return value.
  const compactSizeSets = {
    SSsizeSetKey: uSSsizeSetArr.map((s) => s.join(',')).filter(isNotEmpty),
    sizeSets: SSsizeSets,
  };

  // Second pass: stamp each quote item with its pax range and group by `by` keys.
  const chunkSS = structuredClone(dataRollSS).map((rowp) => {
    // Tier key for this product; falls back to the first global key when the
    // product has no min-list of its own. // todo:
    const pkey = (PGroupSizeSS[rowp.info.id] || []).join(',') || compactSizeSets.SSsizeSetKey[0]; // todo:

    // Same min-list -> [min, max] expansion as above, for this product only.
    const thisRange = (PGroupSizeSS[rowp.info.id] || []).reduce((acc, curr, idx, minsArr) => {
      const _max = idx === minsArr.length - 1 ? maxSet : Number(minsArr[idx + 1]) - 1;
      acc.push([Number(curr), _max]);
      return acc;
    }, []);
    const _quotation = rowp.quotation.map((quoteItem) => {
      // Search ranges from the largest tier downward (reversed copy).
      const ssSets = isEmpty(thisRange) ? SSsizeSets[0] : structuredClone(thisRange).reverse();

      // Matching cascade: exact containment -> max-bounded fit -> min-only fit -> first range.
      const matchRange = ssSets.find((ss) => quoteItem.group_size_min >= ss[0] && quoteItem.group_size_max <= ss[1]);
      const findEnd = matchRange || ssSets.find((ss) => quoteItem.group_size_max > ss[0] && quoteItem.group_size_max <= ss[1] && ss[1] !== Infinity);
      const findStart = findEnd || ssSets.find((ss) => quoteItem.group_size_min >= ss[0]);
      const finalRange = findStart || ssSets[0];

      // Mutates the (cloned) quote item in place.
      quoteItem.quote_size = finalRange.join('-');
      return quoteItem;
    });
    // Group by the '@'-joined `by` key, then fold 'a@b' keys into a two-level object.
    const quote_chunk_flat = groupBy(_quotation, (quoteItem2) => by.map((key) => quoteItem2[key]).join('@'));
    const quote_chunk = Object.keys(quote_chunk_flat).reduce((qc, ckey) => {
      const ckeyArr = ckey.split('@');
      if (isEmpty(qc[ckeyArr[0]])) {
        qc[ckeyArr[0]] = ckeyArr[1] ? { [ckeyArr[1]]: quote_chunk_flat[ckey] } : quote_chunk_flat[ckey];
      } else {
        qc[ckeyArr[0]][ckeyArr[1]] = (qc[ckeyArr[0]][ckeyArr[1]] || []).concat(quote_chunk_flat[ckey]);
      }
      return qc;
    }, {});
    return {
      ...rowp,
      sizeSetsSS: pkey,
      quotation: _quotation,
      quote_chunk,
    };
  });

  const allquotation = chunkSS.reduce((a, c) => a.concat(c.quotation), []);
  // Collect the validity date ranges for each of the two seasons.
  const SSRange = unique((allquotation || []).filter((q) => q.quote_season === 'SS').map((qr) => `${qr.use_dates_start}~${qr.use_dates_end}`));
  const PSRange = unique((allquotation || []).filter((q) => q.quote_season === 'PS').map((qr) => `${qr.use_dates_start}~${qr.use_dates_end}`));

  return {
    chunk: chunkSS,
    dataSource: chunkSS,
    SSRange,
    PSRange,
    ...compactSizeSets,
  };
};
|
||||
Loading…
Reference in New Issue