\displaystyle \textbf{1.}\ \text{If a random experiment is performed, then each of its outcomes is known} \\ \text{as an elementary event.}

\displaystyle \textbf{2.}\ \text{The set of all possible outcomes of a random experiment is called the sample} \\ \text{space associated with it and is generally denoted by }S.
\displaystyle \text{In other words, the set of all elementary events associated to a random experiment is called} \\ \text{its sample space.}

\displaystyle \textbf{3.}\ \text{A subset of the sample space associated to a random experiment is said to define} \\ \text{a compound event if it is the disjoint union of two or more single element subsets of the sample space.}

\displaystyle \textbf{4.}\ \text{An event }A\text{ associated to a random experiment is said to occur if any one} \\ \text{of the elementary events associated to it is an outcome.}
\displaystyle \text{Thus, if an elementary event }E\text{ is an outcome of a random experiment and }A\text{ is an} \\ \text{event such that }E\in A,\text{ then we say that the event }A\text{ has occurred.}

\displaystyle \textbf{5.}\ \text{Corresponding to every event }A\text{ associated with a random experiment we define} \\ \text{an event ``not }A\text{'' which occurs when and only when }A\text{ does not occur.}

\displaystyle \textbf{6.}\ \text{Let }S\text{ be the sample space associated with a random experiment and }A\text{ be} \\ \text{an event associated to the experiment.}
\displaystyle \text{Then elementary events belonging to }A\text{ are known as favourable elementary events} \\ \text{to the event }A.
\displaystyle \text{In other words, an elementary event is said to be favourable to} \\ \text{an event }A\text{ if the occurrence of it ensures the happening or occurrence of }A.

\displaystyle \textbf{7.}\ \text{If there are }n\text{ elementary events associated with a random experiment and } \\ m\text{ of them are favourable to an event }A,\text{ then the probability of occurrence of } \\ A\text{ is denoted by } P(A)\text{ and is defined as the ratio }\frac{m}{n}.
\displaystyle \text{If }P(A)=1,\text{ then }A\text{ is called a certain event and if }P(A)=0,\text{ then }A\text{ is} \\ \text{called an impossible event.}
\displaystyle \text{The number of elementary events which will ensure the non-occurrence of } \\ A\text{ is }(n-m).
\displaystyle \therefore P(\overline{A})=\frac{n-m}{n}\Rightarrow P(A)=1-\frac{n-m}{n}\Rightarrow P(A)=1-P(\overline{A})\Rightarrow P(A)+P(\overline{A})=1.
\displaystyle \text{The odds in favour of the occurrence of }A\text{ are defined as }m:(n-m), \\ \text{ i.e. }P(A):P(\overline{A}),
\displaystyle \text{and the odds against the occurrence of }A\text{ are defined as }(n-m):m, \\ \text{ i.e. }P(\overline{A}):P(A).

\displaystyle \textbf{8.}\ \text{Two or more events associated to a random experiment are mutually exclusive} \\ \text{if the occurrence of one of them prevents the occurrence of all others.}
\displaystyle \text{It follows from the above definition that two or more events associated to a random} \\ \text{experiment are mutually exclusive if there is no elementary event which is} \\ \text{favourable to all the events.}
\displaystyle \text{Thus, if two events }A\text{ and }B\text{ are mutually exclusive, then }P(A\cap B)=0.
\displaystyle \text{Similarly, if }A,B\text{ and }C\text{ are mutually exclusive events, then }P(A\cap B\cap C)=0.
\displaystyle \text{Clearly, all elementary events associated to a random experiment are mutually exclusive as} \\ \text{no two or more of them can occur together.}

\displaystyle \textbf{9.}\ \text{Two or more events associated to a random experiment are exhaustive if their union is the} \\ \text{sample space, i.e. if }A_{1},A_{2},\ldots,A_{n}\text{ are events associated to a random experiment with sample} \\ \text{space }S,\text{ then }A_{1}\cup A_{2}\cup\cdots\cup A_{n}=S.
\displaystyle \text{All elementary events associated to a random experiment form a system of mutually exclusive} \\ \text{and exhaustive events.}
\displaystyle \text{For any event }A\text{ associated to a random experiment, }A\text{ and }\overline{A}\text{ form a pair of exhaustive} \\ \text{and mutually exclusive events.}

\displaystyle \textbf{10.}\ \text{Two events }A\text{ and }B\text{ associated to a random experiment are independent if the probability} \\ \text{of occurrence or non-occurrence of }A\text{ is not affected by the occurrence or non-occurrence of }B.
\displaystyle \text{Three or more events are independent if the probability of occurrence or non-occurrence} \\ \text{of any one of them is not affected by the occurrence or non-occurrence of others.}
\displaystyle \text{Events associated to independent random experiments are always independent.}
\displaystyle \text{If }A\text{ and }B\text{ are two mutually exclusive events associated to a random experiment, then the} \\ \text{occurrence of any one of these two prevents the occurrence of the other, i.e. if } \\ A\text{ occurs, then }P(B)=0.
\displaystyle \text{If }B\text{ occurs, then }P(A)=0.
\displaystyle \text{It follows from this that mutually exclusive events (with non-zero probabilities) associated to a} \\ \text{random experiment are not independent and vice-versa.}

\displaystyle \textbf{11. (i)}\ \text{If }A\text{ and }B\text{ are two events associated with a random experiment, then}
\displaystyle P(A\cup B)=P(A)+P(B)-P(A\cap B).
\displaystyle \textbf{(ii)}\ \text{If }A\text{ and }B\text{ are mutually exclusive events, then}
\displaystyle P(A\cap B)=0.
\displaystyle P(A\cup B)=P(A)+P(B).

\displaystyle \textbf{12. (i)}\ \text{If }A,B,C\text{ are three events associated with a random experiment, then}
\displaystyle P(A\cup B\cup C)=P(A)+P(B)+P(C)-P(A\cap B)-P(B\cap C)-P(A\cap C)+P(A\cap B\cap C).
\displaystyle \textbf{(ii)}\ \text{If }A,B,C\text{ are mutually exclusive events, then}
\displaystyle P(A\cap B)=P(B\cap C)=P(A\cap C)=P(A\cap B\cap C)=0.
\displaystyle P(A\cup B\cup C)=P(A)+P(B)+P(C).

\displaystyle \textbf{13.}\ \text{Let }A\text{ and }B\text{ be two events associated to a random experiment. Then,}
\displaystyle P(\overline{A}\cap B)=P(B)-P(A\cap B).
\displaystyle P(A\cap \overline{B})=P(A)-P(A\cap B).
\displaystyle P((A\cap \overline{B})\cup(\overline{A}\cap B))=P(A)+P(B)-2P(A\cap B).
\displaystyle P(\overline{A}\cap B)\text{ is known as the probability of occurrence of }B\text{ only.}
\displaystyle P(A\cap \overline{B})\text{ is known as the probability of occurrence of }A\text{ only.}
\displaystyle P((A\cap \overline{B})\cup(\overline{A}\cap B))\text{ is known as the probability of occurrence }\text{of exactly one of the} \\ \text{two events }A\text{ and }B.

\displaystyle \textbf{14.}\ \text{For any two events }A\text{ and }B\text{ the probability that exactly one of } A,B\text{ occurs} \\ \text{is given by}
\displaystyle P(A)+P(B)-2P(A\cap B)=P(A\cup B)-P(A\cap B).

\displaystyle \textbf{15.}\ \text{If }A,B,C\text{ are three events, then}
\displaystyle \textbf{(i)}\ P(\text{at least two of }A,B,C\text{ occur})=P(A\cap B)+P(B\cap C)+P(C\cap A)-2P(A\cap B\cap C).
\displaystyle \textbf{(ii)}\ P(\text{exactly two of }A,B,C\text{ occur})=P(A\cap B)+P(B\cap C)+P(A\cap C)-3P(A\cap B\cap C).
\displaystyle \textbf{(iii)}\ P(\text{exactly one of }A,B,C\text{ occurs})=P(A)+P(B)+P(C)-2P(A\cap B)-2P(B\cap C)-2P(A\cap C)+3P(A\cap B\cap C).

\displaystyle \textbf{16. (i)}\ \text{Let }A\text{ and }B\text{ be two events associated to a random experiment. Then the} \\ \text{probability of occurrence of }A\text{ under the condition that }B\text{ has already occurred and} \\ P(B)\neq0\text{ is called the conditional probability and is denoted by }P(A/B).
\displaystyle \text{Thus, }P(A/B)=\text{probability of occurrence of }A\text{ given that }B\text{ has already} \\ \text{occurred.}
\displaystyle \text{Similarly, }P(B/A)\text{ when }P(A)\neq0\text{ is defined as the probability of occurrence of } \\ B\text{ when }A\text{ has already occurred.}
\displaystyle \text{In fact, the meanings of the symbols }P(A/B)\text{ and }P(B/A)\text{ depend on the nature} \\ \text{of the events }A\text{ and }B\text{ and also on the nature of the random experiment.}
\displaystyle P(A/B)=\text{probability of occurrence of }A\text{ when }B\text{ occurs.}
\displaystyle \text{OR}
\displaystyle P(A/B)=\text{probability of occurrence of }A\text{ when }B\text{ is taken as the sample space.}
\displaystyle \text{OR}
\displaystyle P(A/B)=\text{probability of occurrence of }A\text{ with respect to }B.
\displaystyle \text{and,}
\displaystyle P(B/A)=\text{probability of occurrence of }B\text{ when }A\text{ occurs.}
\displaystyle \text{OR}
\displaystyle P(B/A)=\text{probability of occurrence of }B\text{ when }A\text{ is taken as the sample space.}
\displaystyle \text{OR}
\displaystyle P(B/A)=\text{probability of occurrence of }B\text{ with respect to }A.
\displaystyle \text{If }A\text{ and }B\text{ are independent events associated with a random experiment, then}
\displaystyle P(A/B)=P(A)\text{ and }P(B/A)=P(B).

\displaystyle \textbf{17.}\ \text{If }A\text{ and }B\text{ are two events associated with a random experiment, then}
\displaystyle P(A\cap B)=P(A)\,P(B/A),\ \text{if }P(A)\neq0.
\displaystyle \text{or, }P(A\cap B)=P(B)\,P(A/B),\ \text{if }P(B)\neq0.

\displaystyle \textbf{18.}\ \text{If }A\text{ and }B\text{ are independent events, then }P(A/B)=P(A)\text{ and } \\ P(B/A)=P(B).
\displaystyle \therefore P(A\cap B)=P(A)\,P(B).
\displaystyle \text{Also, }P(A\cup B)=1-P(\overline{A})\,P(\overline{B}).

\displaystyle \textbf{19.}\ \text{If }A_{1},A_{2},\ldots,A_{n}\text{ are }n\text{ events associated with a random experiment, then}
\displaystyle P(A_{1}\cap A_{2}\cap A_{3}\cap\cdots\cap A_{n})=P(A_{1})\,P(A_{2}/A_{1})\,P(A_{3}/A_{1}\cap A_{2})\cdots P(A_{n}/A_{1}\cap A_{2}\cap\cdots\cap A_{n-1}).
\displaystyle \text{where }P(A_{i}/A_{1}\cap A_{2}\cap\cdots\cap A_{i-1})\text{ represents the conditional probability of the occurrence} \\ \text{of event }A_{i}\text{ given that the events }A_{1},A_{2},\ldots,A_{i-1}\text{ have already occurred.}

\displaystyle \textbf{20.}\ \text{If }A\text{ and }B\text{ are independent events associated with a random} \\ \text{experiment, then}
\displaystyle \text{(i) }\overline{A}\text{ and }B\text{ are independent events.}
\displaystyle \text{(ii) }A\text{ and }\overline{B}\text{ are independent events.}
\displaystyle \text{(iii) }\overline{A}\text{ and }\overline{B}\text{ are also independent events.}

\displaystyle \textbf{21. (i)}\ \text{If }A\text{ and }B\text{ are independent events associated with a random experiment, then}
\displaystyle P(A\cup B)=1-P(\overline{A})\,P(\overline{B}).
\displaystyle \textbf{(ii)}\ \text{If }A_{1},A_{2},\ldots,A_{n}\text{ are independent events associated with a random experiment, then}
\displaystyle P(A_{1}\cup A_{2}\cup\cdots\cup A_{n})=1-P(\overline{A}_{1})\,P(\overline{A}_{2})\cdots P(\overline{A}_{n}).

\displaystyle \textbf{22.}\ \text{Let }S\text{ be the sample space and let }E_{1},E_{2},\ldots,E_{n}\text{ be } n\text{ mutually exclusive and exhaustive} \\ \text{events.}
\displaystyle \text{If } A \text{ is any event which occurs with any of } E_{1}, E_{2}, \ldots, E_{n}, \text{ then}
\displaystyle P(A)=P(E_{1})P(A/E_{1})+P(E_{2})P(A/E_{2})+\cdots+P(E_{n})P(A/E_{n}).
\displaystyle \text{or, }P(A)=\sum_{r=1}^{n}P(E_{r})P(A/E_{r}).

\displaystyle \textbf{23.}\ \text{Let }S\text{ be the sample space and let }E_{1},E_{2},\ldots,E_{n}\text{ be } n\text{ mutually exclusive and} \\ \text{exhaustive events.}
\displaystyle \text{If }A\text{ is any event which occurs with }E_{1}\text{ or }E_{2}\text{ or }\cdots\text{ or }\\ E_{n},\text{ then}
\displaystyle P(E_{i}/A)=\frac{P(E_{i})P(A/E_{i})}{\sum_{i=1}^{n}P(E_{i})P(A/E_{i})},\quad i=1,2,\ldots,n.
\displaystyle \text{The events }E_{1},E_{2},\ldots,E_{n}\text{ are called hypotheses and the probabilities }P(E_{1}),P(E_{2}),\ldots,P(E_{n})
\displaystyle \text{are known as the prior probabilities.}
\displaystyle \text{The probabilities }P(A/E_{i}),\ i=1,2,\ldots,n,\text{ are called the likelihood probabilities.}
\displaystyle \text{The probabilities }P(E_{i}/A),\ i=1,2,\ldots,n,\text{ are called the posterior probabilities.}


Discover more from ICSE / ISC / CBSE Mathematics Portal for K12 Students

Subscribe to get the latest posts sent to your email.